/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2014 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "hash-table.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "inchash.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "predict.h"
#include "params.h"
#include "builtins.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that red zones are inserted
   around the allocated memory) and free (so that reuse of freed memory is
   delayed), and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
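
   As a concrete sketch (this is not code GCC emits; Offset is
   target-specific, and 0x7fff8000 below is merely the value used on
   x86_64, cf. targetm.asan_shadow_offset), the check for a 4-byte
   load could be written in C as:

     void
     check_load4 (uintptr_t x)
     {
       signed char shadow = *(signed char *) ((x >> 3) + 0x7fff8000);
       if (shadow && (signed char) ((x & 7) + 4 - 1) >= shadow)
         __asan_report_load4 (x);
     }

   The comparison is signed, so the red zone magic bytes (0xF1 etc.,
   negative as signed char) always trigger a report.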
   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
            the next slot be 32 bytes aligned; this one is called Partial
            Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
            'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be parsed
   at runtime by the asan run-time library), whose format is:

   "<function-name> <space> <num-of-variables-on-the-stack>
   (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
   <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parenthesis occurs 'n'
   times, with 'n' being the number of variables on the stack.
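
   For instance, for the function foo above, with 'b' laid out at
   offset 32 and 'a' at offset 96 from the bottom of the protected
   region (per the slot layout above), the string would look something
   like:

   "foo 2 32 8 96 24 "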
   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of variable 'a' and the
       partial red zone following it, like what we had for variable
       'b'.  The least significant 3 bytes being 00 means that the 24
       bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The size of the red zones is chosen so that each
   variable starts on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
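
/* For illustration (the exact values are an assumption, not verbatim
   GCC output): a global 'int g[10];' defined in module g.c would be
   registered with roughly { &g, 40, 64, "g", "g.c", 0 }, i.e. 40 bytes
   of data plus red zone padding up to the next 32-byte multiple, 64.  */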
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Number of instrumentations in current function so far.  */

static int asan_num_accesses;
/* Check whether we should replace inline instrumentation with calls.  */

static inline bool
use_calls_p ()
{
  return ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
         && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;
}
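
/* For example (a usage sketch, assuming the usual GCC param spelling):
   building with
     gcc -fsanitize=address --param asan-instrumentation-with-call-threshold=0
   makes use_calls_p return true from the first access on, so every check
   is emitted as an __asan_loadN/__asan_storeN call instead of an inline
   shadow memory test.  */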
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};
static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
                                                 sizeof (asan_mem_ref),
                                                 10);
  return asan_mem_ref_alloc_pool;
}
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
/* Allocates memory for an instance of asan_mem_ref from the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and has length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  hashval_t h = iterative_hash_expr (mem_ref->start, 0);
  h = iterative_hash_host_wide_int (mem_ref->access_size, h);
  return h;
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return (m1->access_size == m2->access_size
          && operand_equal_p (m1->start, m2->start, 0));
}
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ()->find (&r) != NULL);
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != 0)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
                                          ref->access_size))
        return false;
    }
  return true;
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gimple call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;
  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
         These are handled differently from the classical memory
         access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
        dest = gimple_call_arg (call, 0);
        /* DEST represents the address of a memory location.
           instrument_derefs wants the memory location, so let's
           dereference the address DEST before handing it to
           instrument_derefs.  */
        if (TREE_CODE (dest) == ADDR_EXPR)
          dest = TREE_OPERAND (dest, 0);
        else if (TREE_CODE (dest) == SSA_NAME
                 || TREE_CODE (dest) == INTEGER_CST)
          dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                         dest, build_int_cst (TREE_TYPE (dest), 0));
        else
          gcc_unreachable ();

        access_size = int_size_in_bytes (TREE_TYPE (dest));
      }

    default:
      /* The other memory-access builtins are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }
  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
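
/* As an illustration of the above: for a call to memcpy (d, s, n),
   SRC0 is set to describe s with *SRC0_LEN = n, DST to describe d with
   *DST_LEN = n, and both count as region (not deref) accesses; for an
   __atomic_store_4 (p, v, order), DST describes the dereference *p
   with *DST_LEN = NULL_TREE and *DEST_IS_DEREF = true.  */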
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}
872 /* Initialize shadow_ptr_types array. */
874 static void
875 asan_init_shadow_ptr_types (void)
877 asan_shadow_set = new_alias_set ();
878 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
879 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
880 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
881 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
882 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
883 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
884 initialize_sanitizer_builtins ();
887 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
889 static tree
890 asan_pp_string (pretty_printer *pp)
892 const char *buf = pp_formatted_text (pp);
893 size_t len = strlen (buf);
894 tree ret = build_string (len + 1, buf);
895 TREE_TYPE (ret)
896 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
897 build_index_type (size_int (len)));
898 TREE_READONLY (ret) = 1;
899 TREE_STATIC (ret) = 1;
900 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
903 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
905 static rtx
906 asan_shadow_cst (unsigned char shadow_bytes[4])
908 int i;
909 unsigned HOST_WIDE_INT val = 0;
910 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
911 for (i = 0; i < 4; i++)
912 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
913 << (BITS_PER_UNIT * i);
914 return gen_int_mode (val, SImode);
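
/* E.g. (illustrative): shadow_bytes = { 0x00, 0x00, 0xF1, 0xF1 } yields
   0xF1F10000 on a little-endian target, i.e. shadow_bytes[0] lands in
   the least significant byte of the SImode constant.  */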
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a library
   call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the OFFSETS array offsets are computed.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use-after-return
   protection, or the corresponding address based on the __asan_stack_malloc*
   return value.  */
rtx
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
                            HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
         N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
                    ? (64U << use_after_return_class) : 4096U))
        use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
        base_align_bias = ((asan_frame_size + alignb - 1)
                           & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }

  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
                         gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
                                          << ASAN_SHADOW_SHIFT)
                                         / BITS_PER_UNIT), Pmode), NULL_RTX,
                         1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
                       gen_int_mode (base_offset - base_align_bias, Pmode),
                       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
        {
          id = get_identifier ("__asan_option_detect_stack_use_after_return");
          decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
                             integer_type_node);
          SET_DECL_ASSEMBLER_NAME (decl, id);
          TREE_ADDRESSABLE (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          DECL_EXTERNAL (decl) = 1;
          TREE_STATIC (decl) = 1;
          TREE_PUBLIC (decl) = 1;
          TREE_USED (decl) = 1;
          asan_detect_stack_use_after_return = decl;
        }
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
                use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
                                     GEN_INT (asan_frame_size
                                              + base_align_bias),
                                     TYPE_MODE (pointer_sized_int_node),
                                     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
                                           gen_int_mode (base_align_bias
                                                         - base_offset, Pmode),
                                           NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
                              GEN_INT (ASAN_SHADOW_SHIFT),
                              NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
                     targetm.asan_shadow_offset ()
                     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
              && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
            if (aoff < offset)
              {
                if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL_RTX;
  if (use_after_return_class != -1)
    {
      rtx lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
                               VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
          && can_store_by_pieces (sz, builtin_memset_read_str, &c,
                                  BITS_PER_UNIT, true))
        store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
                         BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
               || !set_storage_via_setmem (shadow_mem,
                                           GEN_INT (sz),
                                           gen_int_mode (c, QImode),
                                           BITS_PER_UNIT, BITS_PER_UNIT,
                                           -1, sz, sz, sz))
        {
          snprintf (buf, sizeof buf, "__asan_stack_free_%d",
                    use_after_return_class);
          ret = init_one_libfunc (buf);
          rtx addr = convert_memory_address (ptr_mode, base);
          rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
          emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
                             GEN_INT (asan_frame_size + base_align_bias),
                             TYPE_MODE (pointer_sized_int_node),
                             orig_addr, ptr_mode);
        }
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
                              & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (last_offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = last_offset;
          asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
          last_offset = offset;
          last_size = 0;
        }
      last_size += base_offset + ((offsets[l - 2] - base_offset)
                                  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
                   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                   (last_offset - prev_offset)
                                   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  ret = get_insns ();
  end_sequence ();
  return ret;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */
bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect vars placed in a user section: vars put into a
         user section from multiple TUs are often assumed to form an
         array of such vars, and inserting padding there would break
         this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
          && !symtab_node::get (decl)->implicit_section)
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, int *nargs)
{
  static enum built_in_function report[2][6]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
          BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
          BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
        { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
          BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
          BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[is_store][5]);
    }
  *nargs = 1;
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, int size_in_bytes, int *nargs)
{
  static enum built_in_function check[2][6]
    = { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
          BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
          BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
        { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
          BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
          BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[is_store][5]);
    }
  *nargs = 1;
  return builtin_decl_implicit (check[is_store][exact_log2 (size_in_bytes)]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          bool create_then_fallthru_edge,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gimple cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
                              /*before_p=*/true,
                              then_more_likely_p,
                              /*create_then_fallthru_edge=*/true,
                              then_bb,
                              fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
                         tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (shadow_ptr_type, NULL),
                                    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
              build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
                                    make_ssa_name (shadow_type, NULL),
                                    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
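
/* For a (hypothetical) SSA name _1 holding BASE_ADDR, the sequence
   inserted by the function above looks roughly like:

     _2 = _1 >> ASAN_SHADOW_SHIFT;
     _3 = _2 + <targetm.asan_shadow_offset ()>;
     _4 = (signed char *) _3;
     _5 = *_4;

   and _5, the shadow byte, is what gets returned.  */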
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g
    = gimple_build_assign_with_ops (TREE_CODE (base),
                                    make_ssa_name (TREE_TYPE (base), NULL),
                                    base, NULL_TREE);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction using callbacks.
   Parameters are similar to BUILD_CHECK_STMT.  */

static void
build_check_stmt_with_calls (location_t loc, tree base, tree len,
                             HOST_WIDE_INT size_in_bytes,
                             gimple_stmt_iterator *iter,
                             bool before_p, bool is_store,
                             bool is_scalar_access)
{
  gimple_stmt_iterator gsi = *iter;
  tree base_ssa = maybe_create_ssa_name (loc, base, &gsi, before_p);

  gimple g
    = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (pointer_sized_int_node,
                                                   NULL),
                                    base_ssa, NULL_TREE);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  else
    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  int nargs;
  tree fun
    = check_func (is_store, is_scalar_access ? size_in_bytes : -1, &nargs);
  if (nargs == 1)
    g = gimple_build_call (fun, 1, base_addr);
  else
    {
      gcc_assert (nargs == 2);
      g = gimple_build_assign_with_ops (NOP_EXPR,
                                        make_ssa_name (pointer_sized_int_node,
                                                       NULL),
                                        len, NULL_TREE);
      gimple_set_location (g, loc);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      tree sz_arg = gimple_assign_lhs (g);
      g = gimple_build_call (fun, nargs, base_addr, sz_arg);
    }
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  if (!before_p)
    {
      gsi_next (&gsi);
      *iter = gsi;
    }
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   NON_ZERO_LEN_P is TRUE if the memory region is guaranteed to have a
   non-zero length.  ALIGN gives the alignment of the accessed memory
   object.

   START_INSTRUMENTED and END_INSTRUMENTED are TRUE if the start/end of
   the memory region have already been instrumented.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
static void
build_check_stmt (location_t location, tree base, tree len,
                  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
                  bool non_zero_len_p, bool before_p, bool is_store,
                  bool is_scalar_access, unsigned int align = 0,
                  bool start_instrumented = false,
                  bool end_instrumented = false)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);

  gcc_assert (!(size_in_bytes > 0 && !non_zero_len_p));

  if (start_instrumented && end_instrumented)
    {
      if (!before_p)
        gsi_next (iter);
      return;
    }

  if (len)
    len = unshare_expr (len);
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
          || !is_scalar_access
          || size_in_bytes > 16)
        size_in_bytes = -1;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
        {
          /* On non-strict alignment targets, if
             16-byte access is just 8-byte aligned,
             this will result in misaligned shadow
             memory 2 byte load, but otherwise can
             be handled using one read.  */
          if (size_in_bytes != 16
              || STRICT_ALIGNMENT
              || align < 8 * BITS_PER_UNIT)
            size_in_bytes = -1;
        }
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  base = unshare_expr (base);

  if (use_calls_p ())
    {
      gsi = *iter;
      build_check_stmt_with_calls (location, base, len, size_in_bytes, iter,
                                   before_p, is_store, is_scalar_access);
      return;
    }

  ++asan_num_accesses;

  if (!non_zero_len_p)
    {
      gcc_assert (before_p);

      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
                             len,
                             build_int_cst (TREE_TYPE (len), 0),
                             NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
      build_check_stmt (location, base, len, size_in_bytes, &gsi,
                        /*non_zero_len_p*/true, /*before_p*/true, is_store,
                        is_scalar_access, align,
                        start_instrumented, end_instrumented);
      return;
    }
  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, before_p,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge=*/false,
                                  &then_bb,
                                  &else_bb);

  tree base_ssa = maybe_create_ssa_name (location, base, &gsi,
                                         /*before_p*/false);

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
                                             shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */

      if (!start_instrumented)
        {
          /* Test (shadow != 0)
             & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
          tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
                                                 shadow_ptr_type);
          gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
          gimple_seq seq = NULL;
          gimple_seq_add_stmt (&seq, shadow_test);
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_addr, 7));
          gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                      gimple_seq_last (seq)));
          if (real_size_in_bytes > 1)
            gimple_seq_add_stmt (&seq,
                                 build_assign (PLUS_EXPR,
                                               gimple_seq_last (seq),
                                               real_size_in_bytes - 1));
          gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
                                                   gimple_seq_last (seq),
                                                   shadow));
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                                   gimple_seq_last (seq)));
          t = gimple_assign_lhs (gimple_seq_last (seq));
          gimple_seq_set_location (seq, location);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
        }
1784 /* For non-constant, misaligned or otherwise weird access sizes,
1785 check first and last byte. */
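/* Roughly, the code built below amounts to:

     last = len - 1;
     base_end_addr = base_addr + last;
     t |= shadow_byte_check (base_end_addr);

   i.e. the same byte-granular shadow test as above, applied to
   the last byte of the region and OR-ed into T when the first
   byte was checked too.  shadow_byte_check is shorthand for this
   sketch only, not a real helper.  */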
1786 if (size_in_bytes == -1 && !end_instrumented)
1788 g = gimple_build_assign_with_ops (MINUS_EXPR,
1789 make_ssa_name (uintptr_type, NULL),
1790 len,
1791 build_int_cst (uintptr_type, 1));
1792 gimple_set_location (g, location);
1793 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1794 tree last = gimple_assign_lhs (g);
1795 g = gimple_build_assign_with_ops (PLUS_EXPR,
1796 make_ssa_name (uintptr_type, NULL),
1797 base_addr,
1798 last);
1799 gimple_set_location (g, location);
1800 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1801 tree base_end_addr = gimple_assign_lhs (g);
1803 tree shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
1804 shadow_ptr_type);
1805 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1806 gimple_seq seq = NULL;
1807 gimple_seq_add_stmt (&seq, shadow_test);
1808 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
1809 base_end_addr, 7));
1810 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1811 gimple_seq_last (seq)));
1812 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
1813 gimple_seq_last (seq),
1814 shadow));
1815 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1816 gimple_seq_last (seq)));
1817 if (!start_instrumented)
1818 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
1819 gimple_seq_last (seq)));
1820 t = gimple_assign_lhs (gimple_seq_last (seq));
1821 gimple_seq_set_location (seq, location);
1822 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1826 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1827 NULL_TREE, NULL_TREE);
1828 gimple_set_location (g, location);
1829 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1831 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1832 gsi = gsi_start_bb (then_bb);
1833 int nargs;
1834 tree fun = report_error_func (is_store, is_scalar_access ? size_in_bytes : -1,
1835 &nargs);
1836 if (nargs == 1)
1837 g = gimple_build_call (fun, 1, base_addr);
1838 else
1840 gcc_assert (nargs == 2);
1841 g = gimple_build_assign_with_ops (NOP_EXPR,
1842 make_ssa_name (pointer_sized_int_node,
1843 NULL),
1844 len, NULL_TREE);
1845 gimple_set_location (g, location);
1846 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1847 tree sz_arg = gimple_assign_lhs (g);
1848 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
1850 gimple_set_location (g, location);
1851 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1853 *iter = gsi_start_bb (else_bb);
1856 /* If T represents a memory access, add instrumentation code before ITER.
1857 LOCATION is source code location.
IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
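/* A sketch of the effect (assuming default parameters): for a
   4-byte store

     s.f = 1;

   instrument_derefs arranges, via build_check_stmt, for roughly

     if (shadow_poisoned_p (&s.f))   // shorthand, not a real helper
       __asan_report_store4 ((uptr) &s.f);
     s.f = 1;

   to be emitted instead.  */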
1860 static void
1861 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1862 location_t location, bool is_store)
1864 if (is_store && !ASAN_INSTRUMENT_WRITES)
1865 return;
1866 if (!is_store && !ASAN_INSTRUMENT_READS)
1867 return;
1869 tree type, base;
1870 HOST_WIDE_INT size_in_bytes;
1872 type = TREE_TYPE (t);
1873 switch (TREE_CODE (t))
1875 case ARRAY_REF:
1876 case COMPONENT_REF:
1877 case INDIRECT_REF:
1878 case MEM_REF:
1879 case VAR_DECL:
1880 break;
1882 default:
1883 return;
1886 size_in_bytes = int_size_in_bytes (type);
1887 if (size_in_bytes <= 0)
1888 return;
1890 HOST_WIDE_INT bitsize, bitpos;
1891 tree offset;
1892 enum machine_mode mode;
1893 int volatilep = 0, unsignedp = 0;
1894 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1895 &mode, &unsignedp, &volatilep, false);
1896 if (((size_in_bytes & (size_in_bytes - 1)) == 0
1897 && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
1898 || bitsize != size_in_bytes * BITS_PER_UNIT)
1900 if (TREE_CODE (t) == COMPONENT_REF
1901 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1903 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1904 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1905 TREE_OPERAND (t, 0), repr,
1906 NULL_TREE), location, is_store);
1908 return;
1910 if (bitpos % BITS_PER_UNIT)
1911 return;
1913 if (TREE_CODE (inner) == VAR_DECL
1914 && offset == NULL_TREE
1915 && bitpos >= 0
1916 && DECL_SIZE (inner)
1917 && tree_fits_shwi_p (DECL_SIZE (inner))
1918 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1920 if (DECL_THREAD_LOCAL_P (inner))
1921 return;
1922 if (!TREE_STATIC (inner))
/* Automatic vars in the current function will always be
   accessible.  */
1926 if (decl_function_context (inner) == current_function_decl)
1927 return;
1929 /* Always instrument external vars, they might be dynamically
1930 initialized. */
1931 else if (!DECL_EXTERNAL (inner))
/* Static vars known not to be dynamically initialized
   will always be accessible.  */
1935 varpool_node *vnode = varpool_node::get (inner);
1936 if (vnode && !vnode->dynamically_initialized)
1937 return;
1941 base = build_fold_addr_expr (t);
1942 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1944 unsigned int align = get_object_alignment (t);
1945 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1946 /*non_zero_len_p*/size_in_bytes > 0, /*before_p=*/true,
1947 is_store, /*is_scalar_access*/true, align);
1948 update_mem_ref_hash_table (base, size_in_bytes);
1949 update_mem_ref_hash_table (t, size_in_bytes);
1954 /* Instrument an access to a contiguous memory region that starts at
the address pointed to by BASE, over a length of LEN (expressed
in units of sizeof (*BASE)).  ITER points to the instruction before
1957 which the instrumentation instructions must be inserted. LOCATION
1958 is the source location that the instrumentation instructions must
1959 have. If IS_STORE is true, then the memory access is a store;
1960 otherwise, it's a load. */
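/* For example, a builtin call such as

     memset (p, 0, n);

   reaches this function with BASE == p and LEN == n, and the
   checks built by build_check_stmt below cover the first and
   last bytes of [p, p + n), guarded by n != 0 when n is not a
   known constant.  This is a description of the flow below, not
   additional emitted code.  */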
1962 static void
1963 instrument_mem_region_access (tree base, tree len,
1964 gimple_stmt_iterator *iter,
1965 location_t location, bool is_store)
1967 if (!POINTER_TYPE_P (TREE_TYPE (base))
1968 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1969 || integer_zerop (len))
1970 return;
1972 /* If the beginning of the memory region has already been
1973 instrumented, do not instrument it. */
1974 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1976 /* If the end of the memory region has already been instrumented, do
1977 not instrument it. */
1978 tree end = asan_mem_ref_get_end (base, len);
1979 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1981 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1983 build_check_stmt (location, base, len, size_in_bytes, iter,
1984 /*non_zero_len_p*/size_in_bytes > 0, /*before_p*/true,
1985 is_store, /*is_scalar_access*/false, /*align*/0,
1986 start_instrumented, end_instrumented);
1988 update_mem_ref_hash_table (base, 1);
1989 if (size_in_bytes != -1)
1990 update_mem_ref_hash_table (end, 1);
1992 *iter = gsi_for_stmt (gsi_stmt (*iter));
1995 /* Instrument the call (to the builtin strlen function) pointed to by
1996 ITER.
1998 This function instruments the access to the first byte of the
1999 argument, right before the call. After the call it instruments the
2000 access to the last byte of the argument; it uses the result of the
2001 call to deduce the offset of that last byte.
2003 Upon completion, iff the call has actually been instrumented, this
2004 function returns TRUE and *ITER points to the statement logically
2005 following the built-in strlen function call *ITER was initially
2006 pointing to. Otherwise, the function returns FALSE and *ITER
2007 remains unchanged. */
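/* Schematically, for

     n = strlen (s);

   one byte is checked at each end of the string:

     check_1_byte_read (s);      // before the call
     n = strlen (s);
     check_1_byte_read (s + n);  // after the call: the terminating NUL

   where check_1_byte_read abbreviates the checks built by
   build_check_stmt.  */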
2009 static bool
2010 instrument_strlen_call (gimple_stmt_iterator *iter)
2012 gimple call = gsi_stmt (*iter);
2013 gcc_assert (is_gimple_call (call));
2015 tree callee = gimple_call_fndecl (call);
2016 gcc_assert (is_builtin_fn (callee)
2017 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2018 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
2020 tree len = gimple_call_lhs (call);
2021 if (len == NULL)
2022 /* Some passes might clear the return value of the strlen call;
2023 bail out in that case. Return FALSE as we are not advancing
2024 *ITER. */
2025 return false;
2026 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
2028 location_t loc = gimple_location (call);
2029 tree str_arg = gimple_call_arg (call, 0);
2030 bool start_instrumented = has_mem_ref_been_instrumented (str_arg, 1);
2032 tree cptr_type = build_pointer_type (char_type_node);
2033 gimple str_arg_ssa =
2034 gimple_build_assign_with_ops (NOP_EXPR,
2035 make_ssa_name (cptr_type, NULL),
2036 str_arg, NULL);
2037 gimple_set_location (str_arg_ssa, loc);
2038 gsi_insert_before (iter, str_arg_ssa, GSI_SAME_STMT);
2040 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), NULL_TREE, 1, iter,
2041 /*non_zero_len_p*/true, /*before_p=*/true,
2042 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0,
2043 start_instrumented, start_instrumented);
2045 gimple g =
2046 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
2047 make_ssa_name (cptr_type, NULL),
2048 gimple_assign_lhs (str_arg_ssa),
2049 len);
2050 gimple_set_location (g, loc);
2051 gsi_insert_after (iter, g, GSI_NEW_STMT);
2053 build_check_stmt (loc, gimple_assign_lhs (g), NULL_TREE, 1, iter,
2054 /*non_zero_len_p*/true, /*before_p=*/false,
2055 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0);
2057 return true;
2060 /* Instrument the call to a built-in memory access function that is
2061 pointed to by the iterator ITER.
2063 Upon completion, return TRUE iff *ITER has been advanced to the
2064 statement following the one it was originally pointing to. */
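/* For instance, for

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports a read of the region
   [s, s + n) and a write of [d, d + n), and each region is then
   handled by instrument_mem_region_access below.  */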
2066 static bool
2067 instrument_builtin_call (gimple_stmt_iterator *iter)
2069 if (!ASAN_MEMINTRIN)
2070 return false;
2072 bool iter_advanced_p = false;
2073 gimple call = gsi_stmt (*iter);
2075 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2077 tree callee = gimple_call_fndecl (call);
2078 location_t loc = gimple_location (call);
2080 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
2081 iter_advanced_p = instrument_strlen_call (iter);
2082 else
2084 asan_mem_ref src0, src1, dest;
2085 asan_mem_ref_init (&src0, NULL, 1);
2086 asan_mem_ref_init (&src1, NULL, 1);
2087 asan_mem_ref_init (&dest, NULL, 1);
2089 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2090 bool src0_is_store = false, src1_is_store = false,
2091 dest_is_store = false, dest_is_deref = false;
2093 if (get_mem_refs_of_builtin_call (call,
2094 &src0, &src0_len, &src0_is_store,
2095 &src1, &src1_len, &src1_is_store,
2096 &dest, &dest_len, &dest_is_store,
2097 &dest_is_deref))
2099 if (dest_is_deref)
2101 instrument_derefs (iter, dest.start, loc, dest_is_store);
2102 gsi_next (iter);
2103 iter_advanced_p = true;
2105 else if (src0_len || src1_len || dest_len)
2107 if (src0.start != NULL_TREE)
2108 instrument_mem_region_access (src0.start, src0_len,
2109 iter, loc, /*is_store=*/false);
2110 if (src1.start != NULL_TREE)
2111 instrument_mem_region_access (src1.start, src1_len,
2112 iter, loc, /*is_store=*/false);
2113 if (dest.start != NULL_TREE)
2114 instrument_mem_region_access (dest.start, dest_len,
2115 iter, loc, /*is_store=*/true);
2116 *iter = gsi_for_stmt (call);
2117 gsi_next (iter);
2118 iter_advanced_p = true;
2122 return iter_advanced_p;
2125 /* Instrument the assignment statement ITER if it is subject to
2126 instrumentation. Return TRUE iff instrumentation actually
2127 happened. In that case, the iterator ITER is advanced to the next
2128 logical expression following the one initially pointed to by ITER,
and the memory reference whose access has just been
instrumented is added to the memory references hash table.  */
2132 static bool
2133 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2135 gimple s = gsi_stmt (*iter);
2137 gcc_assert (gimple_assign_single_p (s));
2139 tree ref_expr = NULL_TREE;
2140 bool is_store, is_instrumented = false;
2142 if (gimple_store_p (s))
2144 ref_expr = gimple_assign_lhs (s);
2145 is_store = true;
2146 instrument_derefs (iter, ref_expr,
2147 gimple_location (s),
2148 is_store);
2149 is_instrumented = true;
2152 if (gimple_assign_load_p (s))
2154 ref_expr = gimple_assign_rhs1 (s);
2155 is_store = false;
2156 instrument_derefs (iter, ref_expr,
2157 gimple_location (s),
2158 is_store);
2159 is_instrumented = true;
2162 if (is_instrumented)
2163 gsi_next (iter);
2165 return is_instrumented;
2168 /* Instrument the function call pointed to by the iterator ITER, if it
2169 is subject to instrumentation. At the moment, the only function
2170 calls that are instrumented are some built-in functions that access
2171 memory. Look at instrument_builtin_call to learn more.
2173 Upon completion return TRUE iff *ITER was advanced to the statement
2174 following the one it was originally pointing to. */
2176 static bool
2177 maybe_instrument_call (gimple_stmt_iterator *iter)
2179 gimple stmt = gsi_stmt (*iter);
2180 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2182 if (is_builtin && instrument_builtin_call (iter))
2183 return true;
2185 if (gimple_call_noreturn_p (stmt))
2187 if (is_builtin)
2189 tree callee = gimple_call_fndecl (stmt);
2190 switch (DECL_FUNCTION_CODE (callee))
2192 case BUILT_IN_UNREACHABLE:
2193 case BUILT_IN_TRAP:
2194 /* Don't instrument these. */
2195 return false;
2198 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2199 gimple g = gimple_build_call (decl, 0);
2200 gimple_set_location (g, gimple_location (stmt));
2201 gsi_insert_before (iter, g, GSI_SAME_STMT);
2203 return false;
/* Walk each instruction of all basic blocks and instrument those that
2207 represent memory references: loads, stores, or function calls.
2208 In a given basic block, this function avoids instrumenting memory
2209 references that have already been instrumented. */
2211 static void
2212 transform_statements (void)
2214 basic_block bb, last_bb = NULL;
2215 gimple_stmt_iterator i;
2216 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2218 FOR_EACH_BB_FN (bb, cfun)
2220 basic_block prev_bb = bb;
2222 if (bb->index >= saved_last_basic_block) continue;
2224 /* Flush the mem ref hash table, if current bb doesn't have
2225 exactly one predecessor, or if that predecessor (skipping
2226 over asan created basic blocks) isn't the last processed
2227 basic block. Thus we effectively flush on extended basic
2228 block boundaries. */
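/* Illustration only: in

     bb1: p[0] = 0;                    // instrumented, ref recorded
     bb2 (single pred bb1): p[0] = 1;  // hash hit, not re-checked

   the table survives from bb1 to bb2, whereas a join point with
   several predecessors flushes it before bb2 is processed.  */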
2229 while (single_pred_p (prev_bb))
2231 prev_bb = single_pred (prev_bb);
2232 if (prev_bb->index < saved_last_basic_block)
2233 break;
2235 if (prev_bb != last_bb)
2236 empty_mem_ref_hash_table ();
2237 last_bb = bb;
2239 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2241 gimple s = gsi_stmt (i);
2243 if (has_stmt_been_instrumented_p (s))
2244 gsi_next (&i);
2245 else if (gimple_assign_single_p (s)
2246 && maybe_instrument_assignment (&i))
2247 /* Nothing to do as maybe_instrument_assignment advanced
2248 the iterator I. */;
2249 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2250 /* Nothing to do as maybe_instrument_call
2251 advanced the iterator I. */;
2252 else
2254 /* No instrumentation happened.
2256 If the current instruction is a function call that
2257 might free something, let's forget about the memory
2258 references that got instrumented. Otherwise we might
2259 miss some instrumentation opportunities. */
2260 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2261 empty_mem_ref_hash_table ();
2263 gsi_next (&i);
2267 free_mem_ref_resources ();
/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call.  */
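/* E.g. for a TU foo.c with dynamically initialized globals, the
   initialization ends up bracketed roughly as

     __asan_before_dynamic_init ("foo.c");
     ... run the TU's dynamic initializers ...
     __asan_after_dynamic_init ();

   where the string is main_input_filename.  */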
2276 tree
2277 asan_dynamic_init_call (bool after_p)
2279 tree fn = builtin_decl_implicit (after_p
2280 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2281 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2282 tree module_name_cst = NULL_TREE;
2283 if (!after_p)
2285 pretty_printer module_name_pp;
2286 pp_string (&module_name_pp, main_input_filename);
2288 if (shadow_ptr_types[0] == NULL_TREE)
2289 asan_init_shadow_ptr_types ();
2290 module_name_cst = asan_pp_string (&module_name_pp);
2291 module_name_cst = fold_convert (const_ptr_type_node,
2292 module_name_cst);
2295 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2298 /* Build
2299 struct __asan_global
2301 const void *__beg;
2302 uptr __size;
2303 uptr __size_with_redzone;
2304 const void *__name;
2305 const void *__module_name;
2306 uptr __has_dynamic_init;
2307 } type. */
2309 static tree
2310 asan_global_struct (void)
2312 static const char *field_names[6]
2313 = { "__beg", "__size", "__size_with_redzone",
2314 "__name", "__module_name", "__has_dynamic_init" };
2315 tree fields[6], ret;
2316 int i;
2318 ret = make_node (RECORD_TYPE);
2319 for (i = 0; i < 6; i++)
2321 fields[i]
2322 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2323 get_identifier (field_names[i]),
2324 (i == 0 || i == 3) ? const_ptr_type_node
2325 : pointer_sized_int_node);
2326 DECL_CONTEXT (fields[i]) = ret;
2327 if (i)
2328 DECL_CHAIN (fields[i - 1]) = fields[i];
2330 TYPE_FIELDS (ret) = fields[0];
2331 TYPE_NAME (ret) = get_identifier ("__asan_global");
2332 layout_type (ret);
2333 return ret;
2336 /* Append description of a single global DECL into vector V.
2337 TYPE is __asan_global struct type as returned by asan_global_struct. */
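/* E.g. for

     int g;   // defined in foo.c

   the record appended to V is, schematically,

     { &g (or its .LASAN alias), sizeof (g),
       sizeof (g) + redzone, "g", "foo.c", has_dynamic_init }

   with the redzone size supplied by asan_red_zone_size.  */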
2339 static void
2340 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2342 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2343 unsigned HOST_WIDE_INT size;
2344 tree str_cst, module_name_cst, refdecl = decl;
2345 vec<constructor_elt, va_gc> *vinner = NULL;
2347 pretty_printer asan_pp, module_name_pp;
2349 if (DECL_NAME (decl))
2350 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2351 else
2352 pp_string (&asan_pp, "<unknown>");
2353 str_cst = asan_pp_string (&asan_pp);
2355 pp_string (&module_name_pp, main_input_filename);
2356 module_name_cst = asan_pp_string (&module_name_pp);
2358 if (asan_needs_local_alias (decl))
2360 char buf[20];
2361 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2362 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2363 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2364 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2365 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2366 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2367 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2368 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2369 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2370 TREE_STATIC (refdecl) = 1;
2371 TREE_PUBLIC (refdecl) = 0;
2372 TREE_USED (refdecl) = 1;
2373 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2376 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2377 fold_convert (const_ptr_type_node,
2378 build_fold_addr_expr (refdecl)));
2379 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2380 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2381 size += asan_red_zone_size (size);
2382 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2383 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2384 fold_convert (const_ptr_type_node, str_cst));
2385 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2386 fold_convert (const_ptr_type_node, module_name_cst));
2387 varpool_node *vnode = varpool_node::get (decl);
2388 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2389 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2390 build_int_cst (uptr, has_dynamic_init));
2391 init = build_constructor (type, vinner);
2392 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2395 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2396 void
2397 initialize_sanitizer_builtins (void)
2399 tree decl;
2401 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2402 return;
2404 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2405 tree BT_FN_VOID_PTR
2406 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2407 tree BT_FN_VOID_CONST_PTR
2408 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2409 tree BT_FN_VOID_PTR_PTR
2410 = build_function_type_list (void_type_node, ptr_type_node,
2411 ptr_type_node, NULL_TREE);
2412 tree BT_FN_VOID_PTR_PTR_PTR
2413 = build_function_type_list (void_type_node, ptr_type_node,
2414 ptr_type_node, ptr_type_node, NULL_TREE);
2415 tree BT_FN_VOID_PTR_PTRMODE
2416 = build_function_type_list (void_type_node, ptr_type_node,
2417 pointer_sized_int_node, NULL_TREE);
2418 tree BT_FN_VOID_INT
2419 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2420 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2421 tree BT_FN_IX_CONST_VPTR_INT[5];
2422 tree BT_FN_IX_VPTR_IX_INT[5];
2423 tree BT_FN_VOID_VPTR_IX_INT[5];
2424 tree vptr
2425 = build_pointer_type (build_qualified_type (void_type_node,
2426 TYPE_QUAL_VOLATILE));
2427 tree cvptr
2428 = build_pointer_type (build_qualified_type (void_type_node,
2429 TYPE_QUAL_VOLATILE
2430 |TYPE_QUAL_CONST));
2431 tree boolt
2432 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2433 int i;
2434 for (i = 0; i < 5; i++)
2436 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2437 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2438 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2439 integer_type_node, integer_type_node,
2440 NULL_TREE);
2441 BT_FN_IX_CONST_VPTR_INT[i]
2442 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2443 BT_FN_IX_VPTR_IX_INT[i]
2444 = build_function_type_list (ix, vptr, ix, integer_type_node,
2445 NULL_TREE);
2446 BT_FN_VOID_VPTR_IX_INT[i]
2447 = build_function_type_list (void_type_node, vptr, ix,
2448 integer_type_node, NULL_TREE);
2450 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2451 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2452 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2453 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2454 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2455 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2456 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2457 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2458 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2459 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2460 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2461 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2462 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2463 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2464 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2465 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2466 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2467 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2468 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2469 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2470 #undef ATTR_NOTHROW_LEAF_LIST
2471 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2472 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2473 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2474 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2475 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2476 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2477 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2478 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2479 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2480 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2481 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2482 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2483 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2484 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2485 #undef DEF_SANITIZER_BUILTIN
2486 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2487 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2488 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2489 set_call_expr_flags (decl, ATTRS); \
2490 set_builtin_decl (ENUM, decl, true);
2492 #include "sanitizer.def"
2494 #undef DEF_SANITIZER_BUILTIN
2497 /* Called via htab_traverse. Count number of emitted
2498 STRING_CSTs in the constant hash table. */
2500 static int
2501 count_string_csts (void **slot, void *data)
2503 struct constant_descriptor_tree *desc
2504 = (struct constant_descriptor_tree *) *slot;
2505 if (TREE_CODE (desc->value) == STRING_CST
2506 && TREE_ASM_WRITTEN (desc->value)
2507 && asan_protect_global (desc->value))
2508 ++*((unsigned HOST_WIDE_INT *) data);
2509 return 1;
2512 /* Helper structure to pass two parameters to
2513 add_string_csts. */
2515 struct asan_add_string_csts_data
2517 tree type;
2518 vec<constructor_elt, va_gc> *v;
2521 /* Called via htab_traverse. Call asan_add_global
2522 on emitted STRING_CSTs from the constant hash table. */
2524 static int
2525 add_string_csts (void **slot, void *data)
2527 struct constant_descriptor_tree *desc
2528 = (struct constant_descriptor_tree *) *slot;
2529 if (TREE_CODE (desc->value) == STRING_CST
2530 && TREE_ASM_WRITTEN (desc->value)
2531 && asan_protect_global (desc->value))
2533 struct asan_add_string_csts_data *aascd
2534 = (struct asan_add_string_csts_data *) data;
2535 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2536 aascd->type, aascd->v);
2538 return 1;
2541 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2542 invoke ggc_collect. */
2543 static GTY(()) tree asan_ctor_statements;
2545 /* Module-level instrumentation.
2546 - Insert __asan_init_vN() into the list of CTORs.
- TODO: insert redzones around globals.  */
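/* Schematically, for a TU with protected globals, the emitted
   constructor and destructor amount to

     ctor:  __asan_init_vN ();
            __asan_register_globals (&.LASAN0, gcount);
     dtor:  __asan_unregister_globals (&.LASAN0, gcount);

   where .LASAN0 names the array of __asan_global records built
   in asan_finish_file below (the label spelling is
   illustrative, generated via ASM_GENERATE_INTERNAL_LABEL).  */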
2550 void
2551 asan_finish_file (void)
2553 varpool_node *vnode;
2554 unsigned HOST_WIDE_INT gcount = 0;
2556 if (shadow_ptr_types[0] == NULL_TREE)
2557 asan_init_shadow_ptr_types ();
/* Avoid instrumenting code in the asan ctors/dtors.
   We don't need to insert padding after the description strings,
   nor after the .LASAN* array.  */
2561 flag_sanitize &= ~SANITIZE_ADDRESS;
2563 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2564 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2565 FOR_EACH_DEFINED_VARIABLE (vnode)
2566 if (TREE_ASM_WRITTEN (vnode->decl)
2567 && asan_protect_global (vnode->decl))
2568 ++gcount;
2569 htab_t const_desc_htab = constant_pool_htab ();
2570 htab_traverse (const_desc_htab, count_string_csts, &gcount);
2571 if (gcount)
2573 tree type = asan_global_struct (), var, ctor;
2574 tree dtor_statements = NULL_TREE;
2575 vec<constructor_elt, va_gc> *v;
2576 char buf[20];
2578 type = build_array_type_nelts (type, gcount);
2579 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2580 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2581 type);
2582 TREE_STATIC (var) = 1;
2583 TREE_PUBLIC (var) = 0;
2584 DECL_ARTIFICIAL (var) = 1;
2585 DECL_IGNORED_P (var) = 1;
2586 vec_alloc (v, gcount);
2587 FOR_EACH_DEFINED_VARIABLE (vnode)
2588 if (TREE_ASM_WRITTEN (vnode->decl)
2589 && asan_protect_global (vnode->decl))
2590 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2591 struct asan_add_string_csts_data aascd;
2592 aascd.type = TREE_TYPE (type);
2593 aascd.v = v;
2594 htab_traverse (const_desc_htab, add_string_csts, &aascd);
2595 ctor = build_constructor (type, v);
2596 TREE_CONSTANT (ctor) = 1;
2597 TREE_STATIC (ctor) = 1;
2598 DECL_INITIAL (var) = ctor;
2599 varpool_node::finalize_decl (var);
2601 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2602 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2603 append_to_statement_list (build_call_expr (fn, 2,
2604 build_fold_addr_expr (var),
2605 gcount_tree),
2606 &asan_ctor_statements);
2608 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2609 append_to_statement_list (build_call_expr (fn, 2,
2610 build_fold_addr_expr (var),
2611 gcount_tree),
2612 &dtor_statements);
2613 cgraph_build_static_cdtor ('D', dtor_statements,
2614 MAX_RESERVED_INIT_PRIORITY - 1);
2616 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2617 MAX_RESERVED_INIT_PRIORITY - 1);
2618 flag_sanitize |= SANITIZE_ADDRESS;
2621 /* Instrument the current function. */
2623 static unsigned int
2624 asan_instrument (void)
2626 if (shadow_ptr_types[0] == NULL_TREE)
2627 asan_init_shadow_ptr_types ();
2628 asan_num_accesses = 0;
2629 transform_statements ();
2630 return 0;
2633 static bool
2634 gate_asan (void)
2636 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2637 && !lookup_attribute ("no_sanitize_address",
2638 DECL_ATTRIBUTES (current_function_decl));
2641 namespace {
2643 const pass_data pass_data_asan =
2645 GIMPLE_PASS, /* type */
2646 "asan", /* name */
2647 OPTGROUP_NONE, /* optinfo_flags */
2648 TV_NONE, /* tv_id */
2649 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2650 0, /* properties_provided */
2651 0, /* properties_destroyed */
2652 0, /* todo_flags_start */
2653 TODO_update_ssa, /* todo_flags_finish */
2656 class pass_asan : public gimple_opt_pass
2658 public:
2659 pass_asan (gcc::context *ctxt)
2660 : gimple_opt_pass (pass_data_asan, ctxt)
2663 /* opt_pass methods: */
2664 opt_pass * clone () { return new pass_asan (m_ctxt); }
2665 virtual bool gate (function *) { return gate_asan (); }
2666 virtual unsigned int execute (function *) { return asan_instrument (); }
2668 }; // class pass_asan
2670 } // anon namespace
2672 gimple_opt_pass *
2673 make_pass_asan (gcc::context *ctxt)
2675 return new pass_asan (ctxt);
2678 namespace {
2680 const pass_data pass_data_asan_O0 =
2682 GIMPLE_PASS, /* type */
2683 "asan0", /* name */
2684 OPTGROUP_NONE, /* optinfo_flags */
2685 TV_NONE, /* tv_id */
2686 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2687 0, /* properties_provided */
2688 0, /* properties_destroyed */
2689 0, /* todo_flags_start */
2690 TODO_update_ssa, /* todo_flags_finish */
2693 class pass_asan_O0 : public gimple_opt_pass
2695 public:
2696 pass_asan_O0 (gcc::context *ctxt)
2697 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2700 /* opt_pass methods: */
2701 virtual bool gate (function *) { return !optimize && gate_asan (); }
2702 virtual unsigned int execute (function *) { return asan_instrument (); }
2704 }; // class pass_asan_O0
2706 } // anon namespace
2708 gimple_opt_pass *
2709 make_pass_asan_O0 (gcc::context *ctxt)
2711 return new pass_asan_O0 (ctxt);
/* Perform optimization of sanitizer functions.  */
2716 namespace {
2718 const pass_data pass_data_sanopt =
2720 GIMPLE_PASS, /* type */
2721 "sanopt", /* name */
2722 OPTGROUP_NONE, /* optinfo_flags */
2723 TV_NONE, /* tv_id */
2724 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2725 0, /* properties_provided */
2726 0, /* properties_destroyed */
2727 0, /* todo_flags_start */
2728 TODO_update_ssa, /* todo_flags_finish */
2731 class pass_sanopt : public gimple_opt_pass
2733 public:
2734 pass_sanopt (gcc::context *ctxt)
2735 : gimple_opt_pass (pass_data_sanopt, ctxt)
2738 /* opt_pass methods: */
2739 virtual bool gate (function *) { return flag_sanitize; }
2740 virtual unsigned int execute (function *);
2742 }; // class pass_sanopt
2744 unsigned int
2745 pass_sanopt::execute (function *fun)
2747 basic_block bb;
2749 FOR_EACH_BB_FN (bb, fun)
2751 gimple_stmt_iterator gsi;
2752 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2754 gimple stmt = gsi_stmt (gsi);
2756 if (!is_gimple_call (stmt))
2757 continue;
2759 if (gimple_call_internal_p (stmt))
2760 switch (gimple_call_internal_fn (stmt))
2762 case IFN_UBSAN_NULL:
2763 ubsan_expand_null_ifn (gsi);
2764 break;
2765 case IFN_UBSAN_BOUNDS:
2766 ubsan_expand_bounds_ifn (&gsi);
2767 break;
2768 default:
2769 break;
2772 if (dump_file && (dump_flags & TDF_DETAILS))
2774 fprintf (dump_file, "Optimized\n ");
2775 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2776 fprintf (dump_file, "\n");
2779 /* ubsan_expand_bounds_ifn might move us to the end of the BB. */
2780 if (gsi_end_p (gsi))
2781 break;
2784 return 0;
2787 } // anon namespace
2789 gimple_opt_pass *
2790 make_pass_sanopt (gcc::context *ctxt)
2792 return new pass_sanopt (ctxt);
2795 #include "gt-asan.h"