gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "gimple.h"
26 #include "tree-iterator.h"
27 #include "tree-flow.h"
28 #include "tree-pass.h"
29 #include "asan.h"
30 #include "gimple-pretty-print.h"
31 #include "target.h"
32 #include "expr.h"
33 #include "optabs.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "langhooks.h"
37 #include "hash-table.h"
38 #include "alloc-pool.h"
40 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
41 with <2x slowdown on average.
43 The tool consists of two parts:
44 instrumentation module (this file) and a run-time library.
45 The instrumentation module adds a run-time check before every memory insn.
46 For an 8- or 16-byte load accessing address X:
47 ShadowAddr = (X >> 3) + Offset
48 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
49 if (ShadowValue)
50 __asan_report_load8(X);
51 For a load of N bytes (N=1, 2 or 4) from address X:
52 ShadowAddr = (X >> 3) + Offset
53 ShadowValue = *(char*)ShadowAddr;
54 if (ShadowValue)
55 if ((X & 7) + N - 1 >= ShadowValue)
56 __asan_report_loadN(X);
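For example, take a 4-byte load from an address X with (X & 7) == 4
whose shadow byte holds the value 6 (meaning only the first 6 bytes of
that 8-byte granule are addressable): (X & 7) + N - 1 == 4 + 4 - 1 == 7,
and 7 >= 6, so __asan_report_load4(X) is called.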
57 Stores are instrumented similarly, but using __asan_report_storeN functions.
58 A call to __asan_init () is inserted into the list of module CTORs.
60 The run-time library redefines malloc (so that redzones are inserted around
61 the allocated memory) and free (so that reuse of freed memory is delayed),
62 and provides the __asan_report* and __asan_init functions.
64 Read more:
65 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
67 The current implementation supports detection of out-of-bounds and
68 use-after-free in the heap, on the stack and for global variables.
70 [Protection of stack variables]
72 To understand how detection of out-of-bounds and use-after-free works
73 for stack variables, let's look at this example on x86_64 where the
74 stack grows downward:
76 int
77 foo ()
79 char a[23] = {0};
80 int b[2] = {0};
82 a[5] = 1;
83 b[1] = 2;
85 return a[5] + b[1];
88 For this function, the stack protected by asan will be organized as
89 follows, from the top of the stack to the bottom:
91 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
93 Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
94 the next slot 32-byte aligned; this one is called Partial
95 Redzone; this 32-byte alignment is an asan constraint]
97 Slot 3/ [24 bytes for variable 'a']
99 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
101 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
103 Slot 6/ [8 bytes for variable 'b']
105 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
106 'LEFT RedZone']
108 The 32 bytes of LEFT red zone at the bottom of the stack can be
109 decomposed as such:
111 1/ The first 8 bytes contain a magical asan number that is always
112 0x41B58AB3.
114 2/ The following 8 bytes contain a pointer to a string (to be
115 parsed at runtime by the asan runtime library), whose format is
116 the following:
118 "<function-name> <space> <num-of-variables-on-the-stack>
119 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
120 <length-of-var-in-bytes> <space> <length-of-var-name> <space>
121 <var-name> ){n} "
122 where '(...){n}' means the content inside the parentheses occurs 'n'
123 times, with 'n' being the number of variables on the stack.
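For instance, for the function foo above with its two variables, the
string could look like "foo 2 32 24 1 a 96 8 1 b " (the offsets shown
here are purely illustrative).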
125 3/ The following 16 bytes of the red zone have no particular
126 format.
128 The shadow memory for that stack layout is going to look like this:
130 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
131 The F1 byte pattern is a magic number called
132 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
133 the memory for that shadow byte is part of the LEFT red zone
134 intended to sit at the bottom of the variables on the stack.
136 - content of shadow memory 8 bytes for slots 6 and 5:
137 0xF4F4F400. The F4 byte pattern is a magic number
138 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
139 memory region for this shadow byte is a PARTIAL red zone
140 intended to pad a variable A, so that the slot following
141 {A,padding} is 32 bytes aligned.
143 Note that the fact that the least significant byte of this
144 shadow memory content is 00 means that 8 bytes of its
145 corresponding memory (which corresponds to the memory of
146 variable 'b') are addressable.
148 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
149 The F2 byte pattern is a magic number called
150 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
151 region for this shadow byte is a MIDDLE red zone intended to
152 sit between two 32-byte aligned slots of {variable,padding}.
154 - content of shadow memory 8 bytes for slot 3 and 2:
155 0xF4000000. This represents the concatenation of
156 variable 'a' and the partial red zone following it, like what we
157 had for variable 'b'. The least significant 3 bytes being 00
158 means that the 24 bytes of variable 'a' are addressable.
160 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
161 The F3 byte pattern is a magic number called
162 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
163 region for this shadow byte is a RIGHT red zone intended to sit
164 at the top of the variables of the stack.
166 Note that the real variable layout is done in expand_used_vars in
167 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
168 stack variables as well as the different red zones, emits some
169 prologue code to populate the shadow memory so as to poison (mark as
170 non-accessible) the regions of the red zones and mark the regions of
171 stack variables as accessible, and emits some epilogue code to
172 un-poison (mark as accessible) the regions of red zones right before
173 the function exits.
175 [Protection of global variables]
177 The basic idea is to insert a red zone between two global variables
178 and install a constructor function that calls the asan runtime to
179 populate the relevant shadow memory regions at load time.
181 So the global variables are laid out so as to insert a red zone between
182 them. The size of the red zones is chosen so that each variable starts
183 on a 32-byte boundary.
185 Then a constructor function is installed so that, for each global
186 variable, it calls the runtime asan library function
187 __asan_register_globals with an instance of this type:
189 struct __asan_global
191 // Address of the beginning of the global variable.
192 const void *__beg;
194 // Initial size of the global variable.
195 uptr __size;
197 // Size of the global variable + size of the red zone. This
198 // size is 32 bytes aligned.
199 uptr __size_with_redzone;
201 // Name of the global variable.
202 const void *__name;
204 // This is always set to NULL for now.
205 uptr __has_dynamic_init;
208 A destructor function that calls the runtime asan library function
209 __asan_unregister_globals is also installed. */
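/* For illustration, the registration machinery described above is
   conceptually equivalent to emitting something like the following
   (names, sizes and the exact runtime signatures are assumed for the
   sake of the example, not mandated by this file):

     static int some_global;      // protected global, red zone appended
     static struct __asan_global desc
       = { &some_global,          // __beg
           sizeof (some_global),  // __size
           32,                    // __size_with_redzone, 32-byte aligned
           "some_global",         // __name
           0 };                   // __has_dynamic_init
     static void asan_ctor (void) { __asan_register_globals (&desc, 1); }
     static void asan_dtor (void) { __asan_unregister_globals (&desc, 1); }  */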
211 alias_set_type asan_shadow_set = -1;
213 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
214 alias set is used for all shadow memory accesses. */
215 static GTY(()) tree shadow_ptr_types[2];
217 /* Hashtable support for memory references used by gimple
218 statements. */
220 /* This type represents a reference to a memory region. */
221 struct asan_mem_ref
223 /* The expression of the beginning of the memory region. */
224 tree start;
226 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
227 char access_size;
230 static alloc_pool asan_mem_ref_alloc_pool;
232 /* This creates the alloc pool used to store the instances of
233 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
235 static alloc_pool
236 asan_mem_ref_get_alloc_pool ()
238 if (asan_mem_ref_alloc_pool == NULL)
239 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
240 sizeof (asan_mem_ref),
241 10);
242 return asan_mem_ref_alloc_pool;
246 /* Initializes an instance of asan_mem_ref. */
248 static void
249 asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
251 ref->start = start;
252 ref->access_size = access_size;
255 /* Allocates memory for an instance of asan_mem_ref into the memory
256 pool returned by asan_mem_ref_get_alloc_pool and initializes it.
257 START is the address of (or the expression pointing to) the
258 beginning of memory reference. ACCESS_SIZE is the size of the
259 access to the referenced memory. */
261 static asan_mem_ref*
262 asan_mem_ref_new (tree start, char access_size)
264 asan_mem_ref *ref =
265 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
267 asan_mem_ref_init (ref, start, access_size);
268 return ref;
271 /* This builds and returns a pointer to the end of the memory region
272 that starts at START and has length LEN. */
274 tree
275 asan_mem_ref_get_end (tree start, tree len)
277 if (len == NULL_TREE || integer_zerop (len))
278 return start;
280 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
283 /* Return a tree expression that represents the end of the referenced
284 memory region. Beware that this function can actually build a new
285 tree expression. */
287 tree
288 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
290 return asan_mem_ref_get_end (ref->start, len);
293 struct asan_mem_ref_hasher
294 : typed_noop_remove <asan_mem_ref>
296 typedef asan_mem_ref value_type;
297 typedef asan_mem_ref compare_type;
299 static inline hashval_t hash (const value_type *);
300 static inline bool equal (const value_type *, const compare_type *);
303 /* Hash a memory reference. */
305 inline hashval_t
306 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
308 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
309 h = iterative_hash_hashval_t (h, mem_ref->access_size);
310 return h;
313 /* Compare two memory references. We accept the length of either
314 memory reference to be NULL_TREE. */
316 inline bool
317 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
318 const asan_mem_ref *m2)
320 return (m1->access_size == m2->access_size
321 && operand_equal_p (m1->start, m2->start, 0));
324 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
326 /* Returns a reference to the hash table containing memory references.
327 This function ensures that the hash table is created. Note that
328 this hash table is updated by the function
329 update_mem_ref_hash_table. */
331 static hash_table <asan_mem_ref_hasher> &
332 get_mem_ref_hash_table ()
334 if (!asan_mem_ref_ht.is_created ())
335 asan_mem_ref_ht.create (10);
337 return asan_mem_ref_ht;
340 /* Clear all entries from the memory references hash table. */
342 static void
343 empty_mem_ref_hash_table ()
345 if (asan_mem_ref_ht.is_created ())
346 asan_mem_ref_ht.empty ();
349 /* Free the memory references hash table. */
351 static void
352 free_mem_ref_resources ()
354 if (asan_mem_ref_ht.is_created ())
355 asan_mem_ref_ht.dispose ();
357 if (asan_mem_ref_alloc_pool)
359 free_alloc_pool (asan_mem_ref_alloc_pool);
360 asan_mem_ref_alloc_pool = NULL;
364 /* Return true iff the memory reference REF has been instrumented. */
366 static bool
367 has_mem_ref_been_instrumented (tree ref, char access_size)
369 asan_mem_ref r;
370 asan_mem_ref_init (&r, ref, access_size);
372 return (get_mem_ref_hash_table ().find (&r) != NULL);
375 /* Return true iff the memory reference REF has been instrumented. */
377 static bool
378 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
380 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
383 /* Return true iff access to memory region starting at REF and of
384 length LEN has been instrumented. */
386 static bool
387 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
389 /* First let's see if the address of the beginning of REF has been
390 instrumented. */
391 if (!has_mem_ref_been_instrumented (ref))
392 return false;
394 if (len != 0)
396 /* Let's see if the end of the region has been instrumented. */
397 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
398 ref->access_size))
399 return false;
401 return true;
404 /* Set REF to the memory reference present in a gimple assignment
405 ASSIGNMENT. Return true upon successful completion, false
406 otherwise. */
408 static bool
409 get_mem_ref_of_assignment (const gimple assignment,
410 asan_mem_ref *ref,
411 bool *ref_is_store)
413 gcc_assert (gimple_assign_single_p (assignment));
415 if (gimple_store_p (assignment)
416 && !gimple_clobber_p (assignment))
418 ref->start = gimple_assign_lhs (assignment);
419 *ref_is_store = true;
421 else if (gimple_assign_load_p (assignment))
423 ref->start = gimple_assign_rhs1 (assignment);
424 *ref_is_store = false;
426 else
427 return false;
429 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
430 return true;
433 /* Return the memory references contained in a gimple statement
434 representing a builtin call that has to do with memory access. */
436 static bool
437 get_mem_refs_of_builtin_call (const gimple call,
438 asan_mem_ref *src0,
439 tree *src0_len,
440 bool *src0_is_store,
441 asan_mem_ref *src1,
442 tree *src1_len,
443 bool *src1_is_store,
444 asan_mem_ref *dst,
445 tree *dst_len,
446 bool *dst_is_store,
447 bool *dest_is_deref)
449 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
451 tree callee = gimple_call_fndecl (call);
452 tree source0 = NULL_TREE, source1 = NULL_TREE,
453 dest = NULL_TREE, len = NULL_TREE;
454 bool is_store = true, got_reference_p = false;
455 char access_size = 1;
457 switch (DECL_FUNCTION_CODE (callee))
459 /* (s, s, n) style memops. */
460 case BUILT_IN_BCMP:
461 case BUILT_IN_MEMCMP:
462 source0 = gimple_call_arg (call, 0);
463 source1 = gimple_call_arg (call, 1);
464 len = gimple_call_arg (call, 2);
465 break;
467 /* (src, dest, n) style memops. */
468 case BUILT_IN_BCOPY:
469 source0 = gimple_call_arg (call, 0);
470 dest = gimple_call_arg (call, 1);
471 len = gimple_call_arg (call, 2);
472 break;
474 /* (dest, src, n) style memops. */
475 case BUILT_IN_MEMCPY:
476 case BUILT_IN_MEMCPY_CHK:
477 case BUILT_IN_MEMMOVE:
478 case BUILT_IN_MEMMOVE_CHK:
479 case BUILT_IN_MEMPCPY:
480 case BUILT_IN_MEMPCPY_CHK:
481 dest = gimple_call_arg (call, 0);
482 source0 = gimple_call_arg (call, 1);
483 len = gimple_call_arg (call, 2);
484 break;
486 /* (dest, n) style memops. */
487 case BUILT_IN_BZERO:
488 dest = gimple_call_arg (call, 0);
489 len = gimple_call_arg (call, 1);
490 break;
492 /* (dest, x, n) style memops. */
493 case BUILT_IN_MEMSET:
494 case BUILT_IN_MEMSET_CHK:
495 dest = gimple_call_arg (call, 0);
496 len = gimple_call_arg (call, 2);
497 break;
499 case BUILT_IN_STRLEN:
500 source0 = gimple_call_arg (call, 0);
501 len = gimple_call_lhs (call);
502 break;
504 /* And now the __atomic* and __sync builtins.
505 These are handled differently from the classical memory
506 access builtins above. */
508 case BUILT_IN_ATOMIC_LOAD_1:
509 case BUILT_IN_ATOMIC_LOAD_2:
510 case BUILT_IN_ATOMIC_LOAD_4:
511 case BUILT_IN_ATOMIC_LOAD_8:
512 case BUILT_IN_ATOMIC_LOAD_16:
513 is_store = false;
514 /* fall through. */
516 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
517 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
518 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
519 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
520 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
522 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
523 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
524 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
525 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
526 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
528 case BUILT_IN_SYNC_FETCH_AND_OR_1:
529 case BUILT_IN_SYNC_FETCH_AND_OR_2:
530 case BUILT_IN_SYNC_FETCH_AND_OR_4:
531 case BUILT_IN_SYNC_FETCH_AND_OR_8:
532 case BUILT_IN_SYNC_FETCH_AND_OR_16:
534 case BUILT_IN_SYNC_FETCH_AND_AND_1:
535 case BUILT_IN_SYNC_FETCH_AND_AND_2:
536 case BUILT_IN_SYNC_FETCH_AND_AND_4:
537 case BUILT_IN_SYNC_FETCH_AND_AND_8:
538 case BUILT_IN_SYNC_FETCH_AND_AND_16:
540 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
541 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
542 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
543 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
544 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
546 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
547 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
548 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
549 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
551 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
552 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
553 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
554 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
555 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
557 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
558 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
559 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
560 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
561 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
563 case BUILT_IN_SYNC_OR_AND_FETCH_1:
564 case BUILT_IN_SYNC_OR_AND_FETCH_2:
565 case BUILT_IN_SYNC_OR_AND_FETCH_4:
566 case BUILT_IN_SYNC_OR_AND_FETCH_8:
567 case BUILT_IN_SYNC_OR_AND_FETCH_16:
569 case BUILT_IN_SYNC_AND_AND_FETCH_1:
570 case BUILT_IN_SYNC_AND_AND_FETCH_2:
571 case BUILT_IN_SYNC_AND_AND_FETCH_4:
572 case BUILT_IN_SYNC_AND_AND_FETCH_8:
573 case BUILT_IN_SYNC_AND_AND_FETCH_16:
575 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
576 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
577 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
578 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
579 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
581 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
582 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
583 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
584 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
586 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
587 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
588 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
589 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
590 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
598 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
599 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
604 case BUILT_IN_SYNC_LOCK_RELEASE_1:
605 case BUILT_IN_SYNC_LOCK_RELEASE_2:
606 case BUILT_IN_SYNC_LOCK_RELEASE_4:
607 case BUILT_IN_SYNC_LOCK_RELEASE_8:
608 case BUILT_IN_SYNC_LOCK_RELEASE_16:
610 case BUILT_IN_ATOMIC_EXCHANGE_1:
611 case BUILT_IN_ATOMIC_EXCHANGE_2:
612 case BUILT_IN_ATOMIC_EXCHANGE_4:
613 case BUILT_IN_ATOMIC_EXCHANGE_8:
614 case BUILT_IN_ATOMIC_EXCHANGE_16:
616 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
617 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
618 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
619 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
622 case BUILT_IN_ATOMIC_STORE_1:
623 case BUILT_IN_ATOMIC_STORE_2:
624 case BUILT_IN_ATOMIC_STORE_4:
625 case BUILT_IN_ATOMIC_STORE_8:
626 case BUILT_IN_ATOMIC_STORE_16:
628 case BUILT_IN_ATOMIC_ADD_FETCH_1:
629 case BUILT_IN_ATOMIC_ADD_FETCH_2:
630 case BUILT_IN_ATOMIC_ADD_FETCH_4:
631 case BUILT_IN_ATOMIC_ADD_FETCH_8:
632 case BUILT_IN_ATOMIC_ADD_FETCH_16:
634 case BUILT_IN_ATOMIC_SUB_FETCH_1:
635 case BUILT_IN_ATOMIC_SUB_FETCH_2:
636 case BUILT_IN_ATOMIC_SUB_FETCH_4:
637 case BUILT_IN_ATOMIC_SUB_FETCH_8:
638 case BUILT_IN_ATOMIC_SUB_FETCH_16:
640 case BUILT_IN_ATOMIC_AND_FETCH_1:
641 case BUILT_IN_ATOMIC_AND_FETCH_2:
642 case BUILT_IN_ATOMIC_AND_FETCH_4:
643 case BUILT_IN_ATOMIC_AND_FETCH_8:
644 case BUILT_IN_ATOMIC_AND_FETCH_16:
646 case BUILT_IN_ATOMIC_NAND_FETCH_1:
647 case BUILT_IN_ATOMIC_NAND_FETCH_2:
648 case BUILT_IN_ATOMIC_NAND_FETCH_4:
649 case BUILT_IN_ATOMIC_NAND_FETCH_8:
650 case BUILT_IN_ATOMIC_NAND_FETCH_16:
652 case BUILT_IN_ATOMIC_XOR_FETCH_1:
653 case BUILT_IN_ATOMIC_XOR_FETCH_2:
654 case BUILT_IN_ATOMIC_XOR_FETCH_4:
655 case BUILT_IN_ATOMIC_XOR_FETCH_8:
656 case BUILT_IN_ATOMIC_XOR_FETCH_16:
658 case BUILT_IN_ATOMIC_OR_FETCH_1:
659 case BUILT_IN_ATOMIC_OR_FETCH_2:
660 case BUILT_IN_ATOMIC_OR_FETCH_4:
661 case BUILT_IN_ATOMIC_OR_FETCH_8:
662 case BUILT_IN_ATOMIC_OR_FETCH_16:
664 case BUILT_IN_ATOMIC_FETCH_ADD_1:
665 case BUILT_IN_ATOMIC_FETCH_ADD_2:
666 case BUILT_IN_ATOMIC_FETCH_ADD_4:
667 case BUILT_IN_ATOMIC_FETCH_ADD_8:
668 case BUILT_IN_ATOMIC_FETCH_ADD_16:
670 case BUILT_IN_ATOMIC_FETCH_SUB_1:
671 case BUILT_IN_ATOMIC_FETCH_SUB_2:
672 case BUILT_IN_ATOMIC_FETCH_SUB_4:
673 case BUILT_IN_ATOMIC_FETCH_SUB_8:
674 case BUILT_IN_ATOMIC_FETCH_SUB_16:
676 case BUILT_IN_ATOMIC_FETCH_AND_1:
677 case BUILT_IN_ATOMIC_FETCH_AND_2:
678 case BUILT_IN_ATOMIC_FETCH_AND_4:
679 case BUILT_IN_ATOMIC_FETCH_AND_8:
680 case BUILT_IN_ATOMIC_FETCH_AND_16:
682 case BUILT_IN_ATOMIC_FETCH_NAND_1:
683 case BUILT_IN_ATOMIC_FETCH_NAND_2:
684 case BUILT_IN_ATOMIC_FETCH_NAND_4:
685 case BUILT_IN_ATOMIC_FETCH_NAND_8:
686 case BUILT_IN_ATOMIC_FETCH_NAND_16:
688 case BUILT_IN_ATOMIC_FETCH_XOR_1:
689 case BUILT_IN_ATOMIC_FETCH_XOR_2:
690 case BUILT_IN_ATOMIC_FETCH_XOR_4:
691 case BUILT_IN_ATOMIC_FETCH_XOR_8:
692 case BUILT_IN_ATOMIC_FETCH_XOR_16:
694 case BUILT_IN_ATOMIC_FETCH_OR_1:
695 case BUILT_IN_ATOMIC_FETCH_OR_2:
696 case BUILT_IN_ATOMIC_FETCH_OR_4:
697 case BUILT_IN_ATOMIC_FETCH_OR_8:
698 case BUILT_IN_ATOMIC_FETCH_OR_16:
700 dest = gimple_call_arg (call, 0);
701 /* DEST represents the address of a memory location.
702 instrument_derefs wants the memory location, so let's
703 dereference the address DEST before handing it to
704 instrument_derefs. */
705 if (TREE_CODE (dest) == ADDR_EXPR)
706 dest = TREE_OPERAND (dest, 0);
707 else if (TREE_CODE (dest) == SSA_NAME)
708 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
709 dest, build_int_cst (TREE_TYPE (dest), 0));
710 else
711 gcc_unreachable ();
713 access_size = int_size_in_bytes (TREE_TYPE (dest));
716 default:
717 /* The other memory access builtins are not instrumented in this
718 function because they either don't have any length parameter,
719 or their length parameter is just a limit. */
720 break;
723 if (len != NULL_TREE)
725 if (source0 != NULL_TREE)
727 src0->start = source0;
728 src0->access_size = access_size;
729 *src0_len = len;
730 *src0_is_store = false;
733 if (source1 != NULL_TREE)
735 src1->start = source1;
736 src1->access_size = access_size;
737 *src1_len = len;
738 *src1_is_store = false;
741 if (dest != NULL_TREE)
743 dst->start = dest;
744 dst->access_size = access_size;
745 *dst_len = len;
746 *dst_is_store = true;
749 got_reference_p = true;
751 else if (dest)
753 dst->start = dest;
754 dst->access_size = access_size;
755 *dst_len = NULL_TREE;
756 *dst_is_store = is_store;
757 *dest_is_deref = true;
758 got_reference_p = true;
761 return got_reference_p;
764 /* Return true iff a given gimple statement has been instrumented.
765 Note that the statement is "defined" by the memory references it
766 contains. */
768 static bool
769 has_stmt_been_instrumented_p (gimple stmt)
771 if (gimple_assign_single_p (stmt))
773 bool r_is_store;
774 asan_mem_ref r;
775 asan_mem_ref_init (&r, NULL, 1);
777 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
778 return has_mem_ref_been_instrumented (&r);
780 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
782 asan_mem_ref src0, src1, dest;
783 asan_mem_ref_init (&src0, NULL, 1);
784 asan_mem_ref_init (&src1, NULL, 1);
785 asan_mem_ref_init (&dest, NULL, 1);
787 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
788 bool src0_is_store = false, src1_is_store = false,
789 dest_is_store = false, dest_is_deref = false;
790 if (get_mem_refs_of_builtin_call (stmt,
791 &src0, &src0_len, &src0_is_store,
792 &src1, &src1_len, &src1_is_store,
793 &dest, &dest_len, &dest_is_store,
794 &dest_is_deref))
796 if (src0.start != NULL_TREE
797 && !has_mem_ref_been_instrumented (&src0, src0_len))
798 return false;
800 if (src1.start != NULL_TREE
801 && !has_mem_ref_been_instrumented (&src1, src1_len))
802 return false;
804 if (dest.start != NULL_TREE
805 && !has_mem_ref_been_instrumented (&dest, dest_len))
806 return false;
808 return true;
811 return false;
814 /* Insert a memory reference into the hash table. */
816 static void
817 update_mem_ref_hash_table (tree ref, char access_size)
819 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
821 asan_mem_ref r;
822 asan_mem_ref_init (&r, ref, access_size);
824 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
825 if (*slot == NULL)
826 *slot = asan_mem_ref_new (ref, access_size);
829 /* Initialize shadow_ptr_types array. */
831 static void
832 asan_init_shadow_ptr_types (void)
834 asan_shadow_set = new_alias_set ();
835 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
836 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
837 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
838 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
839 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
840 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
841 initialize_sanitizer_builtins ();
844 /* Asan pretty-printer, used for building the description STRING_CSTs. */
845 static pretty_printer asan_pp;
846 static bool asan_pp_initialized;
848 /* Initialize asan_pp. */
850 static void
851 asan_pp_initialize (void)
853 pp_construct (&asan_pp, /* prefix */NULL, /* line-width */0);
854 asan_pp_initialized = true;
857 /* Create ADDR_EXPR of STRING_CST with asan_pp text. */
859 static tree
860 asan_pp_string (void)
862 const char *buf = pp_base_formatted_text (&asan_pp);
863 size_t len = strlen (buf);
864 tree ret = build_string (len + 1, buf);
865 TREE_TYPE (ret)
866 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
867 build_index_type (size_int (len)));
868 TREE_READONLY (ret) = 1;
869 TREE_STATIC (ret) = 1;
870 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
873 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
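/* For example, on a little-endian target, shadow_bytes { 0x00, 0x00,
   0x03, 0xF4 } yield the SImode constant 0xF4030000. */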
875 static rtx
876 asan_shadow_cst (unsigned char shadow_bytes[4])
878 int i;
879 unsigned HOST_WIDE_INT val = 0;
880 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
881 for (i = 0; i < 4; i++)
882 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
883 << (BITS_PER_UNIT * i);
884 return GEN_INT (trunc_int_for_mode (val, SImode));
887 /* Clear LEN bytes of shadow memory at SHADOW_MEM. We can't use a library
888 call here, though. */
890 static void
891 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
893 rtx insn, insns, top_label, end, addr, tmp, jump;
895 start_sequence ();
896 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
897 insns = get_insns ();
898 end_sequence ();
899 for (insn = insns; insn; insn = NEXT_INSN (insn))
900 if (CALL_P (insn))
901 break;
902 if (insn == NULL_RTX)
904 emit_insn (insns);
905 return;
908 gcc_assert ((len & 3) == 0);
909 top_label = gen_label_rtx ();
910 addr = force_reg (Pmode, XEXP (shadow_mem, 0));
911 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
912 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
913 emit_label (top_label);
915 emit_move_insn (shadow_mem, const0_rtx);
916 tmp = expand_simple_binop (Pmode, PLUS, addr, GEN_INT (4), addr,
917 true, OPTAB_LIB_WIDEN);
918 if (tmp != addr)
919 emit_move_insn (addr, tmp);
920 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
921 jump = get_last_insn ();
922 gcc_assert (JUMP_P (jump));
923 add_reg_note (jump, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE * 80 / 100));
926 /* Insert code to protect stack vars. The prologue sequence should be emitted
927 directly; the epilogue sequence is returned. BASE is the register holding
928 the stack base, relative to which the offsets in the OFFSETS array are
929 expressed. OFFSETS contains pairs of offsets in reverse order, always the
930 end offset of some gap that needs protection followed by its starting offset,
931 and DECLS is an array of representative decls for each var partition.
932 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
933 elements long (OFFSETS include gap before the first variable as well
934 as gaps after each stack variable). */
937 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
938 int length)
940 rtx shadow_base, shadow_mem, ret, mem;
941 unsigned char shadow_bytes[4];
942 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
943 HOST_WIDE_INT last_offset, last_size;
944 int l;
945 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
946 tree str_cst;
948 if (shadow_ptr_types[0] == NULL_TREE)
949 asan_init_shadow_ptr_types ();
951 /* First of all, prepare the description string. */
952 if (!asan_pp_initialized)
953 asan_pp_initialize ();
955 pp_clear_output_area (&asan_pp);
956 if (DECL_NAME (current_function_decl))
957 pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
958 else
959 pp_string (&asan_pp, "<unknown>");
960 pp_space (&asan_pp);
961 pp_decimal_int (&asan_pp, length / 2 - 1);
962 pp_space (&asan_pp);
963 for (l = length - 2; l; l -= 2)
965 tree decl = decls[l / 2 - 1];
966 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
967 pp_space (&asan_pp);
968 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
969 pp_space (&asan_pp);
970 if (DECL_P (decl) && DECL_NAME (decl))
972 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
973 pp_space (&asan_pp);
974 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
976 else
977 pp_string (&asan_pp, "9 <unknown>");
978 pp_space (&asan_pp);
980 str_cst = asan_pp_string ();
982 /* Emit the prologue sequence. */
983 base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
984 NULL_RTX, 1, OPTAB_DIRECT);
985 mem = gen_rtx_MEM (ptr_mode, base);
986 emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
987 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
988 emit_move_insn (mem, expand_normal (str_cst));
989 shadow_base = expand_binop (Pmode, lshr_optab, base,
990 GEN_INT (ASAN_SHADOW_SHIFT),
991 NULL_RTX, 1, OPTAB_DIRECT);
992 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
993 GEN_INT (targetm.asan_shadow_offset ()),
994 NULL_RTX, 1, OPTAB_DIRECT);
995 gcc_assert (asan_shadow_set != -1
996 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
997 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
998 set_mem_alias_set (shadow_mem, asan_shadow_set);
999 prev_offset = base_offset;
1000 for (l = length; l; l -= 2)
1002 if (l == 2)
1003 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1004 offset = offsets[l - 1];
1005 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1007 int i;
1008 HOST_WIDE_INT aoff
1009 = base_offset + ((offset - base_offset)
1010 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1011 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1012 (aoff - prev_offset)
1013 >> ASAN_SHADOW_SHIFT);
1014 prev_offset = aoff;
1015 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1016 if (aoff < offset)
1018 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1019 shadow_bytes[i] = 0;
1020 else
1021 shadow_bytes[i] = offset - aoff;
1023 else
1024 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1025 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1026 offset = aoff;
1028 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1030 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1031 (offset - prev_offset)
1032 >> ASAN_SHADOW_SHIFT);
1033 prev_offset = offset;
1034 memset (shadow_bytes, cur_shadow_byte, 4);
1035 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1036 offset += ASAN_RED_ZONE_SIZE;
1038 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1040 do_pending_stack_adjust ();
1042 /* Construct epilogue sequence. */
1043 start_sequence ();
1045 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1046 set_mem_alias_set (shadow_mem, asan_shadow_set);
1047 prev_offset = base_offset;
1048 last_offset = base_offset;
1049 last_size = 0;
1050 for (l = length; l; l -= 2)
1052 offset = base_offset + ((offsets[l - 1] - base_offset)
1053 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1054 if (last_offset + last_size != offset)
1056 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1057 (last_offset - prev_offset)
1058 >> ASAN_SHADOW_SHIFT);
1059 prev_offset = last_offset;
1060 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1061 last_offset = offset;
1062 last_size = 0;
1064 last_size += base_offset + ((offsets[l - 2] - base_offset)
1065 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1066 - offset;
1068 if (last_size)
1070 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1071 (last_offset - prev_offset)
1072 >> ASAN_SHADOW_SHIFT);
1073 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1076 do_pending_stack_adjust ();
1078 ret = get_insns ();
1079 end_sequence ();
1080 return ret;
1083 /* Return true if DECL, a global var, might be overridden and
1084 therefore needs a local alias. */
1086 static bool
1087 asan_needs_local_alias (tree decl)
1089 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1092 /* Return true if DECL is a VAR_DECL that should be protected
1093 by Address Sanitizer, by appending a red zone with protected
1094 shadow memory after it and aligning it to at least
1095 ASAN_RED_ZONE_SIZE bytes. */
1097 bool
1098 asan_protect_global (tree decl)
1100 rtx rtl, symbol;
1102 if (TREE_CODE (decl) == STRING_CST)
1104 /* Instrument all STRING_CSTs except those created
1105 by asan_pp_string here. */
1106 if (shadow_ptr_types[0] != NULL_TREE
1107 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1108 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1109 return false;
1110 return true;
1112 if (TREE_CODE (decl) != VAR_DECL
1113 /* TLS vars aren't statically protectable. */
1114 || DECL_THREAD_LOCAL_P (decl)
1115 /* Externs will be protected elsewhere. */
1116 || DECL_EXTERNAL (decl)
1117 || !DECL_RTL_SET_P (decl)
1118 /* Comdat vars pose an ABI problem: we can't know whether
1119 the var that is selected by the linker will have
1120 padding or not. */
1121 || DECL_ONE_ONLY (decl)
1122 /* Similarly for common vars. People can use -fno-common. */
1123 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1124 /* Don't protect vars placed in a user-specified section: vars
1125 from multiple TUs placed in the same user section are often
1126 assumed to form an array of such vars, and putting padding
1127 in there would break this assumption. */
1128 || (DECL_SECTION_NAME (decl) != NULL_TREE
1129 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1130 || DECL_SIZE (decl) == 0
1131 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1132 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1133 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1134 return false;
1136 rtl = DECL_RTL (decl);
1137 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1138 return false;
1139 symbol = XEXP (rtl, 0);
1141 if (CONSTANT_POOL_ADDRESS_P (symbol)
1142 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1143 return false;
1145 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1146 return false;
1148 #ifndef ASM_OUTPUT_DEF
1149 if (asan_needs_local_alias (decl))
1150 return false;
1151 #endif
1153 return true;
1156 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1157 IS_STORE is either 1 (for a store) or 0 (for a load).
1158 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
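/* For example, report_error_func (true, 4) returns the decl of
   __asan_report_store4, i.e. report[1][exact_log2 (4)]. */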
1160 static tree
1161 report_error_func (bool is_store, int size_in_bytes)
1163 static enum built_in_function report[2][5]
1164 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1165 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1166 BUILT_IN_ASAN_REPORT_LOAD16 },
1167 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1168 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1169 BUILT_IN_ASAN_REPORT_STORE16 } };
1170 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1173 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1174 #define PROB_ALWAYS (REG_BR_PROB_BASE)
1176 /* Split the current basic block and create a condition statement
1177 insertion point right before or after the statement pointed to by
1178 ITER. Return an iterator to the point at which the caller might
1179 safely insert the condition statement.
1181 THEN_BLOCK must be set to the address of an uninitialized instance
1182 of basic_block. The function will then set *THEN_BLOCK to the
1183 'then block' of the condition statement to be inserted by the
1184 caller.
1186 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1187 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1189 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1190 block' of the condition statement to be inserted by the caller.
1192 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1193 statements starting from *ITER, and *THEN_BLOCK is a new empty
1194 block.
1196 *ITER is adjusted to always point to the first statement
1197 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1198 same as what ITER was pointing to prior to calling this function,
1199 if BEFORE_P is true; otherwise, it is its following statement. */
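/* Schematically, the split performed below looks like this; the edge
   from then_bb to fallthru_bb is only created when
   CREATE_THEN_FALLTHRU_EDGE is true:

        cond_bb
         |    \
         |   then_bb
         |    /
       fallthru_bb  */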
1201 static gimple_stmt_iterator
1202 create_cond_insert_point (gimple_stmt_iterator *iter,
1203 bool before_p,
1204 bool then_more_likely_p,
1205 bool create_then_fallthru_edge,
1206 basic_block *then_block,
1207 basic_block *fallthrough_block)
1209 gimple_stmt_iterator gsi = *iter;
1211 if (!gsi_end_p (gsi) && before_p)
1212 gsi_prev (&gsi);
1214 basic_block cur_bb = gsi_bb (*iter);
1216 edge e = split_block (cur_bb, gsi_stmt (gsi));
1218 /* Get a hold on the 'condition block', the 'then block' and the
1219 'else block'. */
1220 basic_block cond_bb = e->src;
1221 basic_block fallthru_bb = e->dest;
1222 basic_block then_bb = create_empty_bb (cond_bb);
1224 /* Set up the newly created 'then block'. */
1225 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1226 int fallthrough_probability
1227 = then_more_likely_p
1228 ? PROB_VERY_UNLIKELY
1229 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1230 e->probability = PROB_ALWAYS - fallthrough_probability;
1231 if (create_then_fallthru_edge)
1232 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1234 /* Set up the fallthrough basic block. */
1235 e = find_edge (cond_bb, fallthru_bb);
1236 e->flags = EDGE_FALSE_VALUE;
1237 e->count = cond_bb->count;
1238 e->probability = fallthrough_probability;
1240 /* Update dominance info for the newly created then_bb; note that
1241 fallthru_bb's dominance info has already been updated by
1242 split_block. */
1243 if (dom_info_available_p (CDI_DOMINATORS))
1244 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1246 *then_block = then_bb;
1247 *fallthrough_block = fallthru_bb;
1248 *iter = gsi_start_bb (fallthru_bb);
1250 return gsi_last_bb (cond_bb);
1253 /* Insert an if condition followed by a 'then block' right before the
1254 statement pointed to by ITER. The fallthrough block -- which is the
1255 else block of the condition as well as the destination of the
1256 outgoing edge of the 'then block' -- starts with the statement
1257 pointed to by ITER.
1259 COND is the condition of the if.
1261 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1262 'then block' is higher than the probability of the edge to the
1263 fallthrough block.
1265 Upon completion of the function, *THEN_BB is set to the newly
1266 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1267 fallthrough block.
1269 *ITER is adjusted to still point to the same statement it was
1270 pointing to initially. */
1272 static void
1273 insert_if_then_before_iter (gimple cond,
1274 gimple_stmt_iterator *iter,
1275 bool then_more_likely_p,
1276 basic_block *then_bb,
1277 basic_block *fallthrough_bb)
1279 gimple_stmt_iterator cond_insert_point =
1280 create_cond_insert_point (iter,
1281 /*before_p=*/true,
1282 then_more_likely_p,
1283 /*create_then_fallthru_edge=*/true,
1284 then_bb,
1285 fallthrough_bb);
1286 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1289 /* Instrument the memory access instruction BASE. Insert new
1290 statements before or after ITER.
1292 Note that the memory access represented by BASE can be either an
1293 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1294 location. IS_STORE is TRUE for a store, FALSE for a load.
1295 BEFORE_P is TRUE for inserting the instrumentation code before
1296 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1297 1, 2, 4, 8, 16.
1299 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1300 statement it was pointing to prior to calling this function,
1301 otherwise, it points to the statement logically following it. */
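/* As a sketch, for a 4-byte load the code built below boils down to
   the following (SSA names and the shadow offset are illustrative):

     _1 = (uintptr_type) base;
     _2 = _1 >> ASAN_SHADOW_SHIFT;
     _3 = _2 + targetm.asan_shadow_offset ();
     shadow_4 = *(signed char *) _3;
     if (shadow_4 != 0 && (signed char) ((_1 & 7) + 3) >= shadow_4)
       __asan_report_load4 (_1);  */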
1303 static void
1304 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1305 bool before_p, bool is_store, int size_in_bytes)
1307 gimple_stmt_iterator gsi;
1308 basic_block then_bb, else_bb;
1309 tree t, base_addr, shadow;
1310 gimple g;
1311 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1312 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1313 tree uintptr_type
1314 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
1315 tree base_ssa = base;
1317 /* Get an iterator on the point where we can add the condition
1318 statement for the instrumentation. */
1319 gsi = create_cond_insert_point (iter, before_p,
1320 /*then_more_likely_p=*/false,
1321 /*create_then_fallthru_edge=*/false,
1322 &then_bb,
1323 &else_bb);
1325 base = unshare_expr (base);
1327 /* BASE can already be an SSA_NAME; in that case, do not create a
1328 new SSA_NAME for it. */
1329 if (TREE_CODE (base) != SSA_NAME)
1331 g = gimple_build_assign_with_ops (TREE_CODE (base),
1332 make_ssa_name (TREE_TYPE (base), NULL),
1333 base, NULL_TREE);
1334 gimple_set_location (g, location);
1335 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1336 base_ssa = gimple_assign_lhs (g);
1339 g = gimple_build_assign_with_ops (NOP_EXPR,
1340 make_ssa_name (uintptr_type, NULL),
1341 base_ssa, NULL_TREE);
1342 gimple_set_location (g, location);
1343 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1344 base_addr = gimple_assign_lhs (g);
1346 /* Build
1347 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1349 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1350 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1351 make_ssa_name (uintptr_type, NULL),
1352 base_addr, t);
1353 gimple_set_location (g, location);
1354 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1356 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1357 g = gimple_build_assign_with_ops (PLUS_EXPR,
1358 make_ssa_name (uintptr_type, NULL),
1359 gimple_assign_lhs (g), t);
1360 gimple_set_location (g, location);
1361 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1363 g = gimple_build_assign_with_ops (NOP_EXPR,
1364 make_ssa_name (shadow_ptr_type, NULL),
1365 gimple_assign_lhs (g), NULL_TREE);
1366 gimple_set_location (g, location);
1367 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1369 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1370 build_int_cst (shadow_ptr_type, 0));
1371 g = gimple_build_assign_with_ops (MEM_REF,
1372 make_ssa_name (shadow_type, NULL),
1373 t, NULL_TREE);
1374 gimple_set_location (g, location);
1375 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1376 shadow = gimple_assign_lhs (g);
1378 if (size_in_bytes < 8)
1380 /* Slow path for 1, 2 and 4 byte accesses.
1381 Test ((shadow != 0)
1382 & (((base_addr & 7) + (size_in_bytes - 1)) >= shadow)). */
1383 g = gimple_build_assign_with_ops (NE_EXPR,
1384 make_ssa_name (boolean_type_node,
1385 NULL),
1386 shadow,
1387 build_int_cst (shadow_type, 0));
1388 gimple_set_location (g, location);
1389 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1390 t = gimple_assign_lhs (g);
1392 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
1393 make_ssa_name (uintptr_type,
1394 NULL),
1395 base_addr,
1396 build_int_cst (uintptr_type, 7));
1397 gimple_set_location (g, location);
1398 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1400 g = gimple_build_assign_with_ops (NOP_EXPR,
1401 make_ssa_name (shadow_type,
1402 NULL),
1403 gimple_assign_lhs (g), NULL_TREE);
1404 gimple_set_location (g, location);
1405 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1407 if (size_in_bytes > 1)
1409 g = gimple_build_assign_with_ops (PLUS_EXPR,
1410 make_ssa_name (shadow_type,
1411 NULL),
1412 gimple_assign_lhs (g),
1413 build_int_cst (shadow_type,
1414 size_in_bytes - 1));
1415 gimple_set_location (g, location);
1416 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1419 g = gimple_build_assign_with_ops (GE_EXPR,
1420 make_ssa_name (boolean_type_node,
1421 NULL),
1422 gimple_assign_lhs (g),
1423 shadow);
1424 gimple_set_location (g, location);
1425 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1427 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
1428 make_ssa_name (boolean_type_node,
1429 NULL),
1430 t, gimple_assign_lhs (g));
1431 gimple_set_location (g, location);
1432 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1433 t = gimple_assign_lhs (g);
1435 else
1436 t = shadow;
1438 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1439 NULL_TREE, NULL_TREE);
1440 gimple_set_location (g, location);
1441 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1443 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1444 gsi = gsi_start_bb (then_bb);
1445 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
1446 1, base_addr);
1447 gimple_set_location (g, location);
1448 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1450 *iter = gsi_start_bb (else_bb);
1453 /* If T represents a memory access, add instrumentation code before ITER.
1454 LOCATION is the source code location.
1455 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1457 static void
1458 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1459 location_t location, bool is_store)
1461 tree type, base;
1462 HOST_WIDE_INT size_in_bytes;
1464 type = TREE_TYPE (t);
1465 switch (TREE_CODE (t))
1467 case ARRAY_REF:
1468 case COMPONENT_REF:
1469 case INDIRECT_REF:
1470 case MEM_REF:
1471 break;
1472 default:
1473 return;
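/* Only power-of-two access sizes from 1 to 16 bytes are instrumented
   here; everything else is skipped. */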
1476 size_in_bytes = int_size_in_bytes (type);
1477 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1478 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
1479 return;
1481 HOST_WIDE_INT bitsize, bitpos;
1482 tree offset;
1483 enum machine_mode mode;
1484 int volatilep = 0, unsignedp = 0;
1485 get_inner_reference (t, &bitsize, &bitpos, &offset,
1486 &mode, &unsignedp, &volatilep, false);
1487 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
1488 || bitsize != size_in_bytes * BITS_PER_UNIT)
1490 if (TREE_CODE (t) == COMPONENT_REF
1491 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1493 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1494 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1495 TREE_OPERAND (t, 0), repr,
1496 NULL_TREE), location, is_store);
1498 return;
1501 base = build_fold_addr_expr (t);
1502 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1504 build_check_stmt (location, base, iter, /*before_p=*/true,
1505 is_store, size_in_bytes);
1506 update_mem_ref_hash_table (base, size_in_bytes);
1507 update_mem_ref_hash_table (t, size_in_bytes);
1512 /* Instrument an access to a contiguous memory region that starts at
1513 the address pointed to by BASE, over a length of LEN (expressed
1514 in sizeof (*BASE) bytes). ITER points to the instruction before
1515 which the instrumentation instructions must be inserted. LOCATION
1516 is the source location that the instrumentation instructions must
1517 have. If IS_STORE is true, then the memory access is a store;
1518 otherwise, it's a load. */
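/* For example, for memcpy (d, s, n) this instruments the accesses to
   s[0], s[n - 1], d[0] and d[n - 1], guarding the generated checks
   with "if (n != 0)" when N is not a compile-time constant. */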
1520 static void
1521 instrument_mem_region_access (tree base, tree len,
1522 gimple_stmt_iterator *iter,
1523 location_t location, bool is_store)
1525 if (!POINTER_TYPE_P (TREE_TYPE (base))
1526 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1527 || integer_zerop (len))
1528 return;
1530 gimple_stmt_iterator gsi = *iter;
1532 basic_block fallthrough_bb = NULL, then_bb = NULL;
1534 /* If the beginning of the memory region has already been
1535 instrumented, do not instrument it. */
1536 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1538 /* If the end of the memory region has already been instrumented, do
1539 not instrument it. */
1540 tree end = asan_mem_ref_get_end (base, len);
1541 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1543 if (start_instrumented && end_instrumented)
1544 return;
1546 if (!is_gimple_constant (len))
1548 /* So, the length of the memory area to asan-protect is
1549 non-constant. Let's guard the generated instrumentation code
1550 like:
1552 if (len != 0)
1554 //asan instrumentation code goes here.
1556 // fallthrough instructions, starting with *ITER. */
1558 gimple g = gimple_build_cond (NE_EXPR,
1559 len,
1560 build_int_cst (TREE_TYPE (len), 0),
1561 NULL_TREE, NULL_TREE);
1562 gimple_set_location (g, location);
1563 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1564 &then_bb, &fallthrough_bb);
1565 /* Note that fallthrough_bb starts with the statement that was
1566 pointed to by ITER. */
1568 /* The 'then block' of the 'if (len != 0)' condition is where
1569 we'll generate the asan instrumentation code now. */
1570 gsi = gsi_last_bb (then_bb);
1573 if (!start_instrumented)
1575 /* Instrument the beginning of the memory region to be accessed,
1576 and arrange for the rest of the instrumentation code to be
1577 inserted in the then block *after* the current gsi. */
1578 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1580 if (then_bb)
1581 /* We are in the case where the length of the region is not
1582 constant; so instrumentation code is being generated in the
1583 'then block' of the 'if (len != 0)' condition. Let's arrange
1584 for the subsequent instrumentation statements to go in the
1585 'then block'. */
1586 gsi = gsi_last_bb (then_bb);
1587 else
1589 *iter = gsi;
1590 /* Remember the access as instrumented only here, where the length
1591 is constant. An unknown length might be zero at run time, in
1592 which case the access is not actually instrumented. */
1593 update_mem_ref_hash_table (base, 1);
1597 if (end_instrumented)
1598 return;
1600 /* We want to instrument the access at the end of the memory region,
1601 which is at (base + len - 1). */
1603 /* offset = len - 1; */
1604 len = unshare_expr (len);
1605 tree offset;
1606 gimple_seq seq = NULL;
1607 if (TREE_CODE (len) == INTEGER_CST)
1608 offset = fold_build2 (MINUS_EXPR, size_type_node,
1609 fold_convert (size_type_node, len),
1610 build_int_cst (size_type_node, 1));
1611 else
1613 gimple g;
1614 tree t;
1616 if (TREE_CODE (len) != SSA_NAME)
1618 t = make_ssa_name (TREE_TYPE (len), NULL);
1619 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1620 gimple_set_location (g, location);
1621 gimple_seq_add_stmt_without_update (&seq, g);
1622 len = t;
1624 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1626 t = make_ssa_name (size_type_node, NULL);
1627 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1628 gimple_set_location (g, location);
1629 gimple_seq_add_stmt_without_update (&seq, g);
1630 len = t;
1633 t = make_ssa_name (size_type_node, NULL);
1634 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1635 build_int_cst (size_type_node, 1));
1636 gimple_set_location (g, location);
1637 gimple_seq_add_stmt_without_update (&seq, g);
1638 offset = gimple_assign_lhs (g);
1641 /* _1 = base; */
1642 base = unshare_expr (base);
1643 gimple region_end =
1644 gimple_build_assign_with_ops (TREE_CODE (base),
1645 make_ssa_name (TREE_TYPE (base), NULL),
1646 base, NULL);
1647 gimple_set_location (region_end, location);
1648 gimple_seq_add_stmt_without_update (&seq, region_end);
1650 /* _2 = _1 + offset; */
1651 region_end =
1652 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1653 make_ssa_name (TREE_TYPE (base), NULL),
1654 gimple_assign_lhs (region_end),
1655 offset);
1656 gimple_set_location (region_end, location);
1657 gimple_seq_add_stmt_without_update (&seq, region_end);
1658 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1660 /* instrument access at _2; */
1661 gsi = gsi_for_stmt (region_end);
1662 build_check_stmt (location, gimple_assign_lhs (region_end),
1663 &gsi, /*before_p=*/false, is_store, 1);
1665 if (then_bb == NULL)
1666 update_mem_ref_hash_table (end, 1);
1668 *iter = gsi_for_stmt (gsi_stmt (*iter));
1671 /* Instrument the call (to the builtin strlen function) pointed to by
1672 ITER.
1674 This function instruments the access to the first byte of the
1675 argument, right before the call. After the call it instruments the
1676 access to the last byte of the argument; it uses the result of the
1677 call to deduce the offset of that last byte.
1679 Upon completion, iff the call has actually been instrumented, this
1680 function returns TRUE and *ITER points to the statement logically
1681 following the built-in strlen function call *ITER was initially
1682 pointing to. Otherwise, the function returns FALSE and *ITER
1683 remains unchanged. */
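/* Sketch of the result for "n = strlen (str)" (SSA names and the
   "check" notation are illustrative):

     _1 = (char *) str;
     check _1[0];            // emitted before the call
     n = strlen (str);
     _2 = _1 + n;
     check *_2;              // the terminating NUL, after the call  */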
1685 static bool
1686 instrument_strlen_call (gimple_stmt_iterator *iter)
1688 gimple call = gsi_stmt (*iter);
1689 gcc_assert (is_gimple_call (call));
1691 tree callee = gimple_call_fndecl (call);
1692 gcc_assert (is_builtin_fn (callee)
1693 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1694 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1696 tree len = gimple_call_lhs (call);
1697 if (len == NULL)
1698 /* Some passes might clear the return value of the strlen call;
1699 bail out in that case. Return FALSE as we are not advancing
1700 *ITER. */
1701 return false;
1702 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1704 location_t loc = gimple_location (call);
1705 tree str_arg = gimple_call_arg (call, 0);
1707 /* Instrument the access to the first byte of str_arg, i.e.:
1709 _1 = str_arg; instrument (_1); */
1710 gimple str_arg_ssa =
1711 gimple_build_assign_with_ops (NOP_EXPR,
1712 make_ssa_name (build_pointer_type
1713 (char_type_node), NULL),
1714 str_arg, NULL);
1715 gimple_set_location (str_arg_ssa, loc);
1716 gimple_stmt_iterator gsi = *iter;
1717 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1718 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1719 /*before_p=*/false, /*is_store=*/false, 1);
1721 /* If we initially had an instruction like:
1723 int n = strlen (str)
1725 we now want to instrument the access to str[n], after the
1726 instruction above. */
1728 /* So let's build the access to str[n] that is, access through the
1729 pointer_plus expr: (_1 + len). */
1730 gimple stmt =
1731 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1732 make_ssa_name (TREE_TYPE (str_arg),
1733 NULL),
1734 gimple_assign_lhs (str_arg_ssa),
1735 len);
1736 gimple_set_location (stmt, loc);
1737 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1739 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1740 /*before_p=*/false, /*is_store=*/false, 1);
1742 /* Ensure that iter points to the statement logically following the
1743 one it was initially pointing to. */
1744 *iter = gsi;
1745 /* As *ITER has been advanced to point to the next statement, let's
1746 return true to inform transform_statements that it shouldn't
1747 advance *ITER anymore; otherwise it would skip that next
1748 statement, which then wouldn't be instrumented. */
1749 return true;
1752 /* Instrument the call to a built-in memory access function that is
1753 pointed to by the iterator ITER.
1755 Upon completion, return TRUE iff *ITER has been advanced to the
1756 statement following the one it was originally pointing to. */
1758 static bool
1759 instrument_builtin_call (gimple_stmt_iterator *iter)
1761 bool iter_advanced_p = false;
1762 gimple call = gsi_stmt (*iter);
1764 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1766 tree callee = gimple_call_fndecl (call);
1767 location_t loc = gimple_location (call);
1769 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1770 iter_advanced_p = instrument_strlen_call (iter);
1771 else
1773 asan_mem_ref src0, src1, dest;
1774 asan_mem_ref_init (&src0, NULL, 1);
1775 asan_mem_ref_init (&src1, NULL, 1);
1776 asan_mem_ref_init (&dest, NULL, 1);
1778 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1779 bool src0_is_store = false, src1_is_store = false,
1780 dest_is_store = false, dest_is_deref = false;
1782 if (get_mem_refs_of_builtin_call (call,
1783 &src0, &src0_len, &src0_is_store,
1784 &src1, &src1_len, &src1_is_store,
1785 &dest, &dest_len, &dest_is_store,
1786 &dest_is_deref))
1788 if (dest_is_deref)
1790 instrument_derefs (iter, dest.start, loc, dest_is_store);
1791 gsi_next (iter);
1792 iter_advanced_p = true;
1794 else if (src0_len || src1_len || dest_len)
1796 if (src0.start != NULL_TREE)
1797 instrument_mem_region_access (src0.start, src0_len,
1798 iter, loc, /*is_store=*/false);
1799 if (src1.start != NULL_TREE)
1800 instrument_mem_region_access (src1.start, src1_len,
1801 iter, loc, /*is_store=*/false);
1802 if (dest.start != NULL_TREE)
1803 instrument_mem_region_access (dest.start, dest_len,
1804 iter, loc, /*is_store=*/true);
1805 *iter = gsi_for_stmt (call);
1806 gsi_next (iter);
1807 iter_advanced_p = true;
1811 return iter_advanced_p;
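/* Informal example: for a call

       memcpy (dest, src, n);

   get_mem_refs_of_builtin_call reports a read of N bytes starting at
   SRC and a write of N bytes starting at DEST, so the region branch
   above instruments (src, n) as a load and (dest, n) as a store, then
   advances *ITER past the call.  */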
1814 /* Instrument the assignment statement ITER if it is subject to
1815 instrumentation. Return TRUE iff instrumentation actually
1816 happened. In that case, the iterator ITER is advanced to the next
1817 logical expression following the one initially pointed to by ITER,
1818 and the relevant memory reference whose access has been
1819 instrumented is added to the memory references hash table. */
1821 static bool
1822 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1824 gimple s = gsi_stmt (*iter);
1826 gcc_assert (gimple_assign_single_p (s));
1828 tree ref_expr = NULL_TREE;
1829 bool is_store, is_instrumented = false;
1831 if (gimple_store_p (s))
1833 ref_expr = gimple_assign_lhs (s);
1834 is_store = true;
1835 instrument_derefs (iter, ref_expr,
1836 gimple_location (s),
1837 is_store);
1838 is_instrumented = true;
1841 if (gimple_assign_load_p (s))
1843 ref_expr = gimple_assign_rhs1 (s);
1844 is_store = false;
1845 instrument_derefs (iter, ref_expr,
1846 gimple_location (s),
1847 is_store);
1848 is_instrumented = true;
1851 if (is_instrumented)
1852 gsi_next (iter);
1854 return is_instrumented;
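/* E.g. (sketch): an aggregate copy "a = b;" where both A and B live
   in memory is at the same time a store (to A) and a load (from B),
   so both tests above fire and each side gets its own
   instrument_derefs call.  */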
1857 /* Instrument the function call pointed to by the iterator ITER, if it
1858 is subject to instrumentation. At the moment, the only function
1859 calls that are instrumented are some built-in functions that access
1860 memory. Look at instrument_builtin_call to learn more.
1862 Upon completion return TRUE iff *ITER was advanced to the statement
1863 following the one it was originally pointing to. */
1865 static bool
1866 maybe_instrument_call (gimple_stmt_iterator *iter)
1868 gimple stmt = gsi_stmt (*iter);
1869 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
1871 if (is_builtin && instrument_builtin_call (iter))
1872 return true;
1874 if (gimple_call_noreturn_p (stmt))
1876 if (is_builtin)
1878 tree callee = gimple_call_fndecl (stmt);
1879 switch (DECL_FUNCTION_CODE (callee))
1881 case BUILT_IN_UNREACHABLE:
1882 case BUILT_IN_TRAP:
1883 /* Don't instrument these. */
1884 return false;
1887 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1888 gimple g = gimple_build_call (decl, 0);
1889 gimple_set_location (g, gimple_location (stmt));
1890 gsi_insert_before (iter, g, GSI_SAME_STMT);
1892 return false;
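/* Sketch: for a noreturn call such as

       exit (1);

   (neither BUILT_IN_UNREACHABLE nor BUILT_IN_TRAP), the code above
   yields

       __asan_handle_no_return ();
       exit (1);

   giving the run-time a chance to unpoison stack memory of frames
   whose epilogues will never run.  */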
1895 /* Walk each instruction of all basic blocks and instrument those that
1896 represent memory references: loads, stores, or function calls.
1897 In a given basic block, this function avoids instrumenting memory
1898 references that have already been instrumented. */
1900 static void
1901 transform_statements (void)
1903 basic_block bb, last_bb = NULL;
1904 gimple_stmt_iterator i;
1905 int saved_last_basic_block = last_basic_block;
1907 FOR_EACH_BB (bb)
1909 basic_block prev_bb = bb;
1911 if (bb->index >= saved_last_basic_block) continue;
1913 /* Flush the mem ref hash table, if the current bb doesn't have
1914 exactly one predecessor, or if that predecessor (skipping
1915 over asan created basic blocks) isn't the last processed
1916 basic block. Thus we effectively flush on extended basic
1917 block boundaries. */
1918 while (single_pred_p (prev_bb))
1920 prev_bb = single_pred (prev_bb);
1921 if (prev_bb->index < saved_last_basic_block)
1922 break;
1924 if (prev_bb != last_bb)
1925 empty_mem_ref_hash_table ();
1926 last_bb = bb;
1928 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1930 gimple s = gsi_stmt (i);
1932 if (has_stmt_been_instrumented_p (s))
1933 gsi_next (&i);
1934 else if (gimple_assign_single_p (s)
1935 && maybe_instrument_assignment (&i))
1936 /* Nothing to do as maybe_instrument_assignment advanced
1937 the iterator I. */;
1938 else if (is_gimple_call (s) && maybe_instrument_call (&i))
1939 /* Nothing to do as maybe_instrument_call
1940 advanced the iterator I. */;
1941 else
1943 /* No instrumentation happened.
1945 If the current instruction is a function call that
1946 might free something, let's forget about the memory
1947 references that got instrumented. Otherwise we might
1948 miss some instrumentation opportunities. */
1949 if (is_gimple_call (s) && !nonfreeing_call_p (s))
1950 empty_mem_ref_hash_table ();
1952 gsi_next (&i);
1956 free_mem_ref_resources ();
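/* Informal example of the flush rule above:

       *p = 1;      <-- bb1: instrumented, the ref (p, 1) is recorded
       if (cond)
         *p = 2;    <-- bb2: single predecessor bb1, table kept, so
                        this access is recognized as already covered
       *p = 3;      <-- bb3: two predecessors, table flushed, so this
                        access is instrumented afresh  */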
1959 /* Build
1960 struct __asan_global
1962 const void *__beg;
1963 uptr __size;
1964 uptr __size_with_redzone;
1965 const void *__name;
1966 uptr __has_dynamic_init;
1967 } type. */
1969 static tree
1970 asan_global_struct (void)
1972 static const char *field_names[5]
1973 = { "__beg", "__size", "__size_with_redzone",
1974 "__name", "__has_dynamic_init" };
1975 tree fields[5], ret;
1976 int i;
1978 ret = make_node (RECORD_TYPE);
1979 for (i = 0; i < 5; i++)
1981 fields[i]
1982 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1983 get_identifier (field_names[i]),
1984 (i == 0 || i == 3) ? const_ptr_type_node
1985 : build_nonstandard_integer_type (POINTER_SIZE, 1));
1986 DECL_CONTEXT (fields[i]) = ret;
1987 if (i)
1988 DECL_CHAIN (fields[i - 1]) = fields[i];
1990 TYPE_FIELDS (ret) = fields[0];
1991 TYPE_NAME (ret) = get_identifier ("__asan_global");
1992 layout_type (ret);
1993 return ret;
1996 /* Append description of a single global DECL into vector V.
1997 TYPE is __asan_global struct type as returned by asan_global_struct. */
1999 static void
2000 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2002 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2003 unsigned HOST_WIDE_INT size;
2004 tree str_cst, refdecl = decl;
2005 vec<constructor_elt, va_gc> *vinner = NULL;
2007 if (!asan_pp_initialized)
2008 asan_pp_initialize ();
2010 pp_clear_output_area (&asan_pp);
2011 if (DECL_NAME (decl))
2012 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
2013 else
2014 pp_string (&asan_pp, "<unknown>");
2015 pp_space (&asan_pp);
2016 pp_left_paren (&asan_pp);
2017 pp_string (&asan_pp, main_input_filename);
2018 pp_right_paren (&asan_pp);
2019 str_cst = asan_pp_string ();
2021 if (asan_needs_local_alias (decl))
2023 char buf[20];
2024 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2025 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2026 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2027 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2028 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2029 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2030 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2031 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2032 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2033 TREE_STATIC (refdecl) = 1;
2034 TREE_PUBLIC (refdecl) = 0;
2035 TREE_USED (refdecl) = 1;
2036 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2039 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2040 fold_convert (const_ptr_type_node,
2041 build_fold_addr_expr (refdecl)));
2042 size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
2043 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2044 size += asan_red_zone_size (size);
2045 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2046 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2047 fold_convert (const_ptr_type_node, str_cst));
2048 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
2049 init = build_constructor (type, vinner);
2050 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
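/* Illustration (values hypothetical): for a 4-byte global "int g;"
   defined in foo.c, the element appended to V is roughly

       { &g, 4, 4 + asan_red_zone_size (4), "g (foo.c)", 0 }

   matching the __asan_global fields __beg, __size,
   __size_with_redzone, __name and __has_dynamic_init.  */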
2053 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2054 void
2055 initialize_sanitizer_builtins (void)
2057 tree decl;
2059 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2060 return;
2062 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2063 tree BT_FN_VOID_PTR
2064 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2065 tree BT_FN_VOID_PTR_PTRMODE
2066 = build_function_type_list (void_type_node, ptr_type_node,
2067 build_nonstandard_integer_type (POINTER_SIZE,
2068 1), NULL_TREE);
2069 tree BT_FN_VOID_INT
2070 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2071 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2072 tree BT_FN_IX_CONST_VPTR_INT[5];
2073 tree BT_FN_IX_VPTR_IX_INT[5];
2074 tree BT_FN_VOID_VPTR_IX_INT[5];
2075 tree vptr
2076 = build_pointer_type (build_qualified_type (void_type_node,
2077 TYPE_QUAL_VOLATILE));
2078 tree cvptr
2079 = build_pointer_type (build_qualified_type (void_type_node,
2080 TYPE_QUAL_VOLATILE
2081 |TYPE_QUAL_CONST));
2082 tree boolt
2083 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2084 int i;
2085 for (i = 0; i < 5; i++)
2087 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2088 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2089 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2090 integer_type_node, integer_type_node,
2091 NULL_TREE);
2092 BT_FN_IX_CONST_VPTR_INT[i]
2093 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2094 BT_FN_IX_VPTR_IX_INT[i]
2095 = build_function_type_list (ix, vptr, ix, integer_type_node,
2096 NULL_TREE);
2097 BT_FN_VOID_VPTR_IX_INT[i]
2098 = build_function_type_list (void_type_node, vptr, ix,
2099 integer_type_node, NULL_TREE);
2101 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2102 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2103 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2104 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2105 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2106 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2107 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2108 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2109 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2110 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2111 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2112 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2113 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2114 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2115 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2116 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2117 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2118 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2119 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2120 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2121 #undef ATTR_NOTHROW_LEAF_LIST
2122 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2123 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2124 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2125 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2126 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2127 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2128 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2129 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2130 #undef DEF_SANITIZER_BUILTIN
2131 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2132 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2133 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2134 set_call_expr_flags (decl, ATTRS); \
2135 set_builtin_decl (ENUM, decl, true);
2137 #include "sanitizer.def"
2139 #undef DEF_SANITIZER_BUILTIN
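/* For instance (sketch), a sanitizer.def entry of the form

       DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                              BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into code that builds a decl named
   "__builtin___asan_init" of type void (void), sets the nothrow/leaf
   flags on it, and registers it as the implicit built-in for
   BUILT_IN_ASAN_INIT.  */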
2142 /* Called via htab_traverse. Count number of emitted
2143 STRING_CSTs in the constant hash table. */
2145 static int
2146 count_string_csts (void **slot, void *data)
2148 struct constant_descriptor_tree *desc
2149 = (struct constant_descriptor_tree *) *slot;
2150 if (TREE_CODE (desc->value) == STRING_CST
2151 && TREE_ASM_WRITTEN (desc->value)
2152 && asan_protect_global (desc->value))
2153 ++*((unsigned HOST_WIDE_INT *) data);
2154 return 1;
2157 /* Helper structure to pass two parameters to
2158 add_string_csts. */
2160 struct asan_add_string_csts_data
2162 tree type;
2163 vec<constructor_elt, va_gc> *v;
2166 /* Called via htab_traverse. Call asan_add_global
2167 on emitted STRING_CSTs from the constant hash table. */
2169 static int
2170 add_string_csts (void **slot, void *data)
2172 struct constant_descriptor_tree *desc
2173 = (struct constant_descriptor_tree *) *slot;
2174 if (TREE_CODE (desc->value) == STRING_CST
2175 && TREE_ASM_WRITTEN (desc->value)
2176 && asan_protect_global (desc->value))
2178 struct asan_add_string_csts_data *aascd
2179 = (struct asan_add_string_csts_data *) data;
2180 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2181 aascd->type, aascd->v);
2183 return 1;
2186 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2187 invoke ggc_collect. */
2188 static GTY(()) tree asan_ctor_statements;
2190 /* Module-level instrumentation.
2191 - Insert __asan_init() into the list of CTORs.
2192 - TODO: insert redzones around globals.
2195 void
2196 asan_finish_file (void)
2198 struct varpool_node *vnode;
2199 unsigned HOST_WIDE_INT gcount = 0;
2201 if (shadow_ptr_types[0] == NULL_TREE)
2202 asan_init_shadow_ptr_types ();
2203 /* Avoid instrumenting code in the asan ctors/dtors.
2204 We don't need to insert padding after the description strings,
2205 nor after the .LASAN* array. */
2206 flag_asan = 0;
2208 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2209 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2210 FOR_EACH_DEFINED_VARIABLE (vnode)
2211 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2212 && asan_protect_global (vnode->symbol.decl))
2213 ++gcount;
2214 htab_t const_desc_htab = constant_pool_htab ();
2215 htab_traverse (const_desc_htab, count_string_csts, &gcount);
2216 if (gcount)
2218 tree type = asan_global_struct (), var, ctor;
2219 tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
2220 tree dtor_statements = NULL_TREE;
2221 vec<constructor_elt, va_gc> *v;
2222 char buf[20];
2224 type = build_array_type_nelts (type, gcount);
2225 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2226 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2227 type);
2228 TREE_STATIC (var) = 1;
2229 TREE_PUBLIC (var) = 0;
2230 DECL_ARTIFICIAL (var) = 1;
2231 DECL_IGNORED_P (var) = 1;
2232 vec_alloc (v, gcount);
2233 FOR_EACH_DEFINED_VARIABLE (vnode)
2234 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2235 && asan_protect_global (vnode->symbol.decl))
2236 asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
2237 struct asan_add_string_csts_data aascd;
2238 aascd.type = TREE_TYPE (type);
2239 aascd.v = v;
2240 htab_traverse (const_desc_htab, add_string_csts, &aascd);
2241 ctor = build_constructor (type, v);
2242 TREE_CONSTANT (ctor) = 1;
2243 TREE_STATIC (ctor) = 1;
2244 DECL_INITIAL (var) = ctor;
2245 varpool_assemble_decl (varpool_node_for_decl (var));
2247 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2248 append_to_statement_list (build_call_expr (fn, 2,
2249 build_fold_addr_expr (var),
2250 build_int_cst (uptr, gcount)),
2251 &asan_ctor_statements);
2253 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2254 append_to_statement_list (build_call_expr (fn, 2,
2255 build_fold_addr_expr (var),
2256 build_int_cst (uptr, gcount)),
2257 &dtor_statements);
2258 cgraph_build_static_cdtor ('D', dtor_statements,
2259 MAX_RESERVED_INIT_PRIORITY - 1);
2261 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2262 MAX_RESERVED_INIT_PRIORITY - 1);
2263 flag_asan = 1;
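/* Net effect (informal sketch): the module gains a static constructor
   and, when GCOUNT is non-zero, a static destructor, both at priority
   MAX_RESERVED_INIT_PRIORITY - 1, roughly

       ctor:  __asan_init ();
              __asan_register_globals (&LASAN0, gcount);   (if gcount)
       dtor:  __asan_unregister_globals (&LASAN0, gcount);

   where LASAN0 stands for the internal array of __asan_global
   descriptors assembled above.  */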
2266 /* Instrument the current function. */
2268 static unsigned int
2269 asan_instrument (void)
2271 if (shadow_ptr_types[0] == NULL_TREE)
2272 asan_init_shadow_ptr_types ();
2273 transform_statements ();
2274 return 0;
2277 static bool
2278 gate_asan (void)
2280 return flag_asan != 0
2281 && !lookup_attribute ("no_sanitize_address",
2282 DECL_ATTRIBUTES (current_function_decl));
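/* E.g. a function declared as

       void f (void) __attribute__ ((no_sanitize_address));

   is left alone: while compiling F, gate_asan returns false, so
   neither pass_asan nor pass_asan_O0 instruments its body.  */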
2285 struct gimple_opt_pass pass_asan =
2288 GIMPLE_PASS,
2289 "asan", /* name */
2290 OPTGROUP_NONE, /* optinfo_flags */
2291 gate_asan, /* gate */
2292 asan_instrument, /* execute */
2293 NULL, /* sub */
2294 NULL, /* next */
2295 0, /* static_pass_number */
2296 TV_NONE, /* tv_id */
2297 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
2298 0, /* properties_provided */
2299 0, /* properties_destroyed */
2300 0, /* todo_flags_start */
2301 TODO_verify_flow | TODO_verify_stmts
2302 | TODO_update_ssa /* todo_flags_finish */
2306 static bool
2307 gate_asan_O0 (void)
2309 return !optimize && gate_asan ();
2312 struct gimple_opt_pass pass_asan_O0 =
2315 GIMPLE_PASS,
2316 "asan0", /* name */
2317 OPTGROUP_NONE, /* optinfo_flags */
2318 gate_asan_O0, /* gate */
2319 asan_instrument, /* execute */
2320 NULL, /* sub */
2321 NULL, /* next */
2322 0, /* static_pass_number */
2323 TV_NONE, /* tv_id */
2324 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
2325 0, /* properties_provided */
2326 0, /* properties_destroyed */
2327 0, /* todo_flags_start */
2328 TODO_verify_flow | TODO_verify_stmts
2329 | TODO_update_ssa /* todo_flags_finish */
2333 #include "gt-asan.h"