/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "options.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "predict.h"
31 #include "tm.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "dominance.h"
35 #include "cfg.h"
36 #include "cfganal.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "gimple.h"
42 #include "gimplify.h"
43 #include "gimple-iterator.h"
44 #include "calls.h"
45 #include "varasm.h"
46 #include "stor-layout.h"
47 #include "tree-iterator.h"
48 #include "plugin-api.h"
49 #include "ipa-ref.h"
50 #include "cgraph.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-pass.h"
54 #include "asan.h"
55 #include "gimple-pretty-print.h"
56 #include "target.h"
57 #include "rtl.h"
58 #include "flags.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "output.h"
69 #include "tm_p.h"
70 #include "langhooks.h"
71 #include "alloc-pool.h"
72 #include "cfgloop.h"
73 #include "gimple-builder.h"
74 #include "ubsan.h"
75 #include "params.h"
76 #include "builtins.h"
77 #include "fnmatch.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 > ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
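
   As an illustration only -- a rough C rendering of the pseudo-code
   above for N = 4, not code emitted by this file:

     unsigned char sv = *(unsigned char *) ((x >> 3) + offset);
     if (sv != 0 && (x & 7) + 4 - 1 > sv)
       __asan_report_load4 (x);

   where 'x' is the address being accessed and 'offset' is the
   target-specific shadow offset (see asan_shadow_offset below).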

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32 bytes aligned; this one is called Partial
	   Redzone; this 32 bytes alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	   'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the runtime asan library), whose format is
   the following:

    "<function-name> <space> <num-of-variables-on-the-stack>
    (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
    <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parentheses occurs 'n'
   times, with 'n' being the number of variables on the stack.
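
   As a purely illustrative instance of that format: for the foo
   function above, with 'b' at the 32-byte aligned offset 32 and 'a' at
   offset 96 (as the slot layout above would suggest), such a string
   could look like

     "foo 2 96 24 32 8 "

   The exact offsets and lengths depend on the frame layout actually
   computed at expand time.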

   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

   - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
     The F1 byte pattern is a magic number called
     ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
     the memory for that shadow byte is part of the LEFT red zone
     intended to sit at the bottom of the variables on the stack.

   - content of shadow memory 8 bytes for slots 6 and 5:
     0xF4F4F400.  The F4 byte pattern is a magic number
     called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
     memory region for this shadow byte is a PARTIAL red zone
     intended to pad a variable A, so that the slot following
     {A,padding} is 32 bytes aligned.

     Note that the fact that the least significant byte of this
     shadow memory content is 00 means that 8 bytes of its
     corresponding memory (which corresponds to the memory of
     variable 'b') is addressable.

   - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
     The F2 byte pattern is a magic number called
     ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
     region for this shadow byte is a MIDDLE red zone intended to
     sit between two 32-byte aligned slots of {variable,padding}.

   - content of shadow memory 8 bytes for slots 3 and 2:
     0xF4000000.  This represents the concatenation of
     variable 'a' and the partial red zone following it, like what we
     had for variable 'b'.  The least significant 3 bytes being 00
     means that the 24 bytes of variable 'a' are addressable.

   - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
     The F3 byte pattern is a magic number called
     ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
     region for this shadow byte is a RIGHT red zone intended to sit
     at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to a struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
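
/* Purely as an illustration (hypothetical names; this file emits the
   equivalent in tree/gimple form, not this C code), the registration
   described above behaves like:

     static struct __asan_global __asan_descriptors[N_GLOBALS];

     static void
     __asan_module_ctor (void)
     {
       __asan_register_globals (__asan_descriptors, N_GLOBALS);
     }

     static void
     __asan_module_dtor (void)
     {
       __asan_unregister_globals (__asan_descriptors, N_GLOBALS);
     }
*/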

static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
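
/* For illustration: the argument arrives from the command line (e.g.
   -fasan-shadow-offset=0x7fff8000).  Since strtoull/strtoul is called with
   base 0, hexadecimal, octal and decimal spellings are all accepted:

     set_asan_shadow_offset ("0x7fff8000")  -> true
     set_asan_shadow_offset ("")            -> false (empty string)
     set_asan_shadow_offset ("123abc")      -> false (trailing junk)
*/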

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
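
/* E.g. given the string ".mysec*,.data.foo", the loop above pushes the two
   patterns ".mysec*" and ".data.foo"; section_sanitized_p below then
   matches section names against them with fnmatch, so glob wildcards are
   honored.  */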

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((asan_mem_ref *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<asan_mem_ref> pool;
};

pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref from the memory
   pool and initializes it.  START is the address of (or the expression
   pointing to) the beginning of the memory reference.  ACCESS_SIZE is
   the size of the access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = new asan_mem_ref;

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and has length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref *value_type;
  typedef asan_mem_ref *compare_type;

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref::pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
	 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:

      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other builtin memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
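
/* A worked example (illustrative): on a little-endian target, the shadow
   byte sequence {0x00, 0x00, 0x00, 0xF4} -- 24 addressable bytes followed
   by a partial red zone -- packs into

     val = 0x00 | (0x00 << 8) | (0x00 << 16) | (0xF4 << 24) = 0xF4000000

   which matches the 0xF4000000 pattern described for slots 3 and 2 in the
   file-level comment above.  */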

/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
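
/* The fallback loop emitted above corresponds, as a C sketch rather than
   the actual RTL, to:

     unsigned int *p = shadow_start;
     unsigned int *end = shadow_start + len / 4;
     do
       *p++ = 0;
     while (p < end);

   i.e. a 4-bytes-at-a-time clearing loop whose backward branch is
   annotated as 80% likely taken.  */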

/* Output the LASANPC internal label; its address is stored in the stack
   frame and used by the run-time library as the current function's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the offsets in the OFFSETS array are
   expressed; the OFFSETS array contains pairs of offsets in reverse order,
   always the end offset of some gap that needs protection followed by its
   starting offset, and DECLS is an array of representative decls for each
   var partition.  LENGTH is the length of the OFFSETS array, DECLS array
   is LENGTH / 2 - 1 elements long (OFFSETS include the gap before the
   first variable as well as gaps after each stack variable).  PBASE is,
   if non-NULL, some pseudo register which stack vars DECL_RTLs are based
   on.  Either BASE should be assigned to PBASE, when not doing use after
   return protection, or the corresponding address based on the
   __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
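  /* For example, on x86_64, where targetm.asan_shadow_offset returns
     0x7fff8000 and ASAN_SHADOW_SHIFT is 3, the two statements above
     compute shadow_base = (base >> 3) + 0x7fff8000 (plus the alignment
     bias, if any) -- the same mapping described in the file-level
     comment.  */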
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
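
/* For instance, a known 4-byte store with recovery disabled selects
   report[0][1][exact_log2 (4)] == report[0][1][2], i.e.
   __asan_report_store4, which takes a single argument (the faulting
   address); an unknown size (size_in_bytes == -1) selects the _N variant,
   which also takes the access length as a second argument.  */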

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
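
/* Schematically, the CFG produced above is:

     cond_bb --(EDGE_TRUE_VALUE)--> then_bb
        |                              |
   (EDGE_FALSE_VALUE)         (EDGE_FALLTHRU, only if
        |                      create_then_fallthru_edge)
        v                              v
     fallthru_bb <---------------------+
*/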

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
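
/* On x86_64, for example, with ASAN_SHADOW_SHIFT == 3 and a shadow offset
   of 0x7fff8000, the gimple sequence built above looks roughly like this
   (SSA names invented for illustration):

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (signed char *) _2;
     _4 = *_3;

   and _4, the loaded shadow byte, is what gets returned.  */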

/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
1688 /* Instrument the memory access instruction BASE. Insert new
1689 statements before or after ITER.
1691 Note that the memory access represented by BASE can be either an
1692 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1693 location. IS_STORE is TRUE for a store, FALSE for a load.
1694 BEFORE_P is TRUE for inserting the instrumentation code before
1695 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1696 for a scalar memory access and FALSE for memory region access.
1697 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1698 length. ALIGN tells alignment of accessed memory object.
1700 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1701 memory region have already been instrumented.
1703 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1704 statement it was pointing to prior to calling this function,
1705 otherwise, it points to the statement logically following it. */
1707 static void
1708 build_check_stmt (location_t loc, tree base, tree len,
1709 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1710 bool is_non_zero_len, bool before_p, bool is_store,
1711 bool is_scalar_access, unsigned int align = 0)
1713 gimple_stmt_iterator gsi = *iter;
1714 gimple g;
1716 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1718 gsi = *iter;
1720 base = unshare_expr (base);
1721 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1723 if (len)
1725 len = unshare_expr (len);
1726 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1728 else
1730 gcc_assert (size_in_bytes != -1);
1731 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1734 if (size_in_bytes > 1)
1736 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1737 || size_in_bytes > 16)
1738 is_scalar_access = false;
1739 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1741 /* On non-strict alignment targets, if
1742 16-byte access is just 8-byte aligned,
1743 this will result in misaligned shadow
1744 memory 2 byte load, but otherwise can
1745 be handled using one read. */
1746 if (size_in_bytes != 16
1747 || STRICT_ALIGNMENT
1748 || align < 8 * BITS_PER_UNIT)
1749 is_scalar_access = false;
1753 HOST_WIDE_INT flags = 0;
1754 if (is_store)
1755 flags |= ASAN_CHECK_STORE;
1756 if (is_non_zero_len)
1757 flags |= ASAN_CHECK_NON_ZERO_LEN;
1758 if (is_scalar_access)
1759 flags |= ASAN_CHECK_SCALAR_ACCESS;
1761 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1762 build_int_cst (integer_type_node, flags),
1763 base, len,
1764 build_int_cst (integer_type_node,
1765 align / BITS_PER_UNIT));
1766 gimple_set_location (g, loc);
1767 if (before_p)
1768 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1769 else
1771 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1772 gsi_next (&gsi);
1773 *iter = gsi;
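/* E.g. for a 4-byte, 4-byte-aligned scalar store this boils down to
   emitting, roughly,

     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
                  | ASAN_CHECK_SCALAR_ACCESS, base_2, 4, 4);

   which asan_expand_check_ifn later expands into the actual shadow
   memory test.  */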
1777 /* If T represents a memory access, add instrumentation code before ITER.
1778 LOCATION is source code location.
1779 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1781 static void
1782 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1783 location_t location, bool is_store)
1785 if (is_store && !ASAN_INSTRUMENT_WRITES)
1786 return;
1787 if (!is_store && !ASAN_INSTRUMENT_READS)
1788 return;
1790 tree type, base;
1791 HOST_WIDE_INT size_in_bytes;
1793 type = TREE_TYPE (t);
1794 switch (TREE_CODE (t))
1796 case ARRAY_REF:
1797 case COMPONENT_REF:
1798 case INDIRECT_REF:
1799 case MEM_REF:
1800 case VAR_DECL:
1801 case BIT_FIELD_REF:
1802 break;
1804 default:
1805 return;
1808 size_in_bytes = int_size_in_bytes (type);
1809 if (size_in_bytes <= 0)
1810 return;
1812 HOST_WIDE_INT bitsize, bitpos;
1813 tree offset;
1814 machine_mode mode;
1815 int volatilep = 0, unsignedp = 0;
1816 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1817 &mode, &unsignedp, &volatilep, false);
1819 if (TREE_CODE (t) == COMPONENT_REF
1820 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1822 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1823 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1824 TREE_OPERAND (t, 0), repr,
1825 NULL_TREE), location, is_store);
1826 return;
1829 if (bitpos % BITS_PER_UNIT
1830 || bitsize != size_in_bytes * BITS_PER_UNIT)
1831 return;
1833 if (TREE_CODE (inner) == VAR_DECL
1834 && offset == NULL_TREE
1835 && bitpos >= 0
1836 && DECL_SIZE (inner)
1837 && tree_fits_shwi_p (DECL_SIZE (inner))
1838 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1840 if (DECL_THREAD_LOCAL_P (inner))
1841 return;
1842 if (!ASAN_GLOBALS && is_global_var (inner))
1843 return;
1844 if (!TREE_STATIC (inner))
1846 /* Automatic vars in the current function will always
1847 be accessible. */
1848 if (decl_function_context (inner) == current_function_decl)
1849 return;
1851 /* Always instrument external vars; they might be dynamically
1852 initialized. */
1853 else if (!DECL_EXTERNAL (inner))
1855 /* For static vars, if they are known not to be dynamically
1856 initialized, they will always be accessible. */
1857 varpool_node *vnode = varpool_node::get (inner);
1858 if (vnode && !vnode->dynamically_initialized)
1859 return;
1863 base = build_fold_addr_expr (t);
1864 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1866 unsigned int align = get_object_alignment (t);
1867 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1868 /*is_non_zero_len=*/size_in_bytes > 0, /*before_p=*/true,
1869 is_store, /*is_scalar_access=*/true, align);
1870 update_mem_ref_hash_table (base, size_in_bytes);
1871 update_mem_ref_hash_table (t, size_in_bytes);
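/* For instance, for a 4-byte store 'g.x = 1;' the code above emits
   roughly

     .ASAN_CHECK (..., &g.x, 4, align);

   before the store and records &g.x in the hash table, so the same
   reference is not instrumented again within the extended basic
   block.  */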
1876 /* Insert a memory reference into the hash table if the access
1877 length can be determined at compile time. */
1879 static void
1880 maybe_update_mem_ref_hash_table (tree base, tree len)
1882 if (!POINTER_TYPE_P (TREE_TYPE (base))
1883 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1884 return;
1886 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1888 if (size_in_bytes != -1)
1889 update_mem_ref_hash_table (base, size_in_bytes);
1892 /* Instrument an access to a contiguous memory region that starts at
1893 the address pointed to by BASE, over a length of LEN (expressed in
1894 multiples of sizeof (*BASE) bytes). ITER points to the instruction before
1895 which the instrumentation instructions must be inserted. LOCATION
1896 is the source location that the instrumentation instructions must
1897 have. If IS_STORE is true, then the memory access is a store;
1898 otherwise, it's a load. */
1900 static void
1901 instrument_mem_region_access (tree base, tree len,
1902 gimple_stmt_iterator *iter,
1903 location_t location, bool is_store)
1905 if (!POINTER_TYPE_P (TREE_TYPE (base))
1906 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1907 || integer_zerop (len))
1908 return;
1910 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1912 if ((size_in_bytes == -1)
1913 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1915 build_check_stmt (location, base, len, size_in_bytes, iter,
1916 /*is_non_zero_len=*/size_in_bytes > 0, /*before_p=*/true,
1917 is_store, /*is_scalar_access=*/false, /*align=*/0);
1920 maybe_update_mem_ref_hash_table (base, len);
1921 *iter = gsi_for_stmt (gsi_stmt (*iter));
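/* As a rough example, 'memset (p, 0, n)' with a non-constant N yields
   a single region check along the lines of

     .ASAN_CHECK (ASAN_CHECK_STORE, p_1, n_2, 0);

   here is_non_zero_len is false, so the later expansion guards the
   whole check with 'if (n_2 != 0)'.  */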
1924 /* Instrument the call to a built-in memory access function that is
1925 pointed to by the iterator ITER.
1927 Upon completion, return TRUE iff *ITER has been advanced to the
1928 statement following the one it was originally pointing to. */
1930 static bool
1931 instrument_builtin_call (gimple_stmt_iterator *iter)
1933 if (!ASAN_MEMINTRIN)
1934 return false;
1936 bool iter_advanced_p = false;
1937 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1939 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1941 location_t loc = gimple_location (call);
1943 asan_mem_ref src0, src1, dest;
1944 asan_mem_ref_init (&src0, NULL, 1);
1945 asan_mem_ref_init (&src1, NULL, 1);
1946 asan_mem_ref_init (&dest, NULL, 1);
1948 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1949 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1950 dest_is_deref = false, intercepted_p = true;
1952 if (get_mem_refs_of_builtin_call (call,
1953 &src0, &src0_len, &src0_is_store,
1954 &src1, &src1_len, &src1_is_store,
1955 &dest, &dest_len, &dest_is_store,
1956 &dest_is_deref, &intercepted_p))
1958 if (dest_is_deref)
1960 instrument_derefs (iter, dest.start, loc, dest_is_store);
1961 gsi_next (iter);
1962 iter_advanced_p = true;
1964 else if (!intercepted_p
1965 && (src0_len || src1_len || dest_len))
1967 if (src0.start != NULL_TREE)
1968 instrument_mem_region_access (src0.start, src0_len,
1969 iter, loc, /*is_store=*/false);
1970 if (src1.start != NULL_TREE)
1971 instrument_mem_region_access (src1.start, src1_len,
1972 iter, loc, /*is_store=*/false);
1973 if (dest.start != NULL_TREE)
1974 instrument_mem_region_access (dest.start, dest_len,
1975 iter, loc, /*is_store=*/true);
1977 *iter = gsi_for_stmt (call);
1978 gsi_next (iter);
1979 iter_advanced_p = true;
1981 else
1983 if (src0.start != NULL_TREE)
1984 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1985 if (src1.start != NULL_TREE)
1986 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1987 if (dest.start != NULL_TREE)
1988 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1991 return iter_advanced_p;
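/* Note the three cases above: a builtin that dereferences one pointer
   (e.g. an __atomic access) is instrumented as a scalar access; a
   non-intercepted builtin with known length arguments gets region
   checks for its sources and destination; and for an intercepted
   builtin (e.g. memcpy, which libasan checks itself) only the hash
   table is updated, so duplicate checks can be skipped later.  */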
1994 /* Instrument the assignment statement ITER if it is subject to
1995 instrumentation. Return TRUE iff instrumentation actually
1996 happened. In that case, the iterator ITER is advanced to the next
1997 logical expression following the one initially pointed to by ITER,
1998 and the memory reference whose access has been instrumented
1999 is added to the memory references hash table. */
2001 static bool
2002 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2004 gimple s = gsi_stmt (*iter);
2006 gcc_assert (gimple_assign_single_p (s));
2008 tree ref_expr = NULL_TREE;
2009 bool is_store, is_instrumented = false;
2011 if (gimple_store_p (s))
2013 ref_expr = gimple_assign_lhs (s);
2014 is_store = true;
2015 instrument_derefs (iter, ref_expr,
2016 gimple_location (s),
2017 is_store);
2018 is_instrumented = true;
2021 if (gimple_assign_load_p (s))
2023 ref_expr = gimple_assign_rhs1 (s);
2024 is_store = false;
2025 instrument_derefs (iter, ref_expr,
2026 gimple_location (s),
2027 is_store);
2028 is_instrumented = true;
2031 if (is_instrumented)
2032 gsi_next (iter);
2034 return is_instrumented;
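/* For example, the aggregate copy '*p_1 = *q_2;' is both a store and
   a load, so both references get instrumented above before the
   iterator is advanced past the assignment.  */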
2037 /* Instrument the function call pointed to by the iterator ITER, if it
2038 is subject to instrumentation. At the moment, the only function
2039 calls that are instrumented are some built-in functions that access
2040 memory. Look at instrument_builtin_call to learn more.
2042 Upon completion return TRUE iff *ITER was advanced to the statement
2043 following the one it was originally pointing to. */
2045 static bool
2046 maybe_instrument_call (gimple_stmt_iterator *iter)
2048 gimple stmt = gsi_stmt (*iter);
2049 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2051 if (is_builtin && instrument_builtin_call (iter))
2052 return true;
2054 if (gimple_call_noreturn_p (stmt))
2056 if (is_builtin)
2058 tree callee = gimple_call_fndecl (stmt);
2059 switch (DECL_FUNCTION_CODE (callee))
2061 case BUILT_IN_UNREACHABLE:
2062 case BUILT_IN_TRAP:
2063 /* Don't instrument these. */
2064 return false;
2065 default:
2066 break;
2069 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2070 gimple g = gimple_build_call (decl, 0);
2071 gimple_set_location (g, gimple_location (stmt));
2072 gsi_insert_before (iter, g, GSI_SAME_STMT);
2074 return false;
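/* E.g. before a noreturn call such as 'exit (1)' this inserts

     __asan_handle_no_return ();

   letting the run-time library unpoison the stack, as the frames
   below will never be popped normally.  */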
2077 /* Walk each instruction of all basic blocks and instrument those that
2078 represent memory references: loads, stores, or function calls.
2079 In a given basic block, this function avoids instrumenting memory
2080 references that have already been instrumented. */
2082 static void
2083 transform_statements (void)
2085 basic_block bb, last_bb = NULL;
2086 gimple_stmt_iterator i;
2087 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2089 FOR_EACH_BB_FN (bb, cfun)
2091 basic_block prev_bb = bb;
2093 if (bb->index >= saved_last_basic_block) continue;
2095 /* Flush the mem ref hash table, if current bb doesn't have
2096 exactly one predecessor, or if that predecessor (skipping
2097 over asan created basic blocks) isn't the last processed
2098 basic block. Thus we effectively flush on extended basic
2099 block boundaries. */
2100 while (single_pred_p (prev_bb))
2102 prev_bb = single_pred (prev_bb);
2103 if (prev_bb->index < saved_last_basic_block)
2104 break;
2106 if (prev_bb != last_bb)
2107 empty_mem_ref_hash_table ();
2108 last_bb = bb;
2110 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2112 gimple s = gsi_stmt (i);
2114 if (has_stmt_been_instrumented_p (s))
2115 gsi_next (&i);
2116 else if (gimple_assign_single_p (s)
2117 && !gimple_clobber_p (s)
2118 && maybe_instrument_assignment (&i))
2119 /* Nothing to do as maybe_instrument_assignment advanced
2120 the iterator I. */;
2121 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2122 /* Nothing to do as maybe_instrument_call
2123 advanced the iterator I. */;
2124 else
2126 /* No instrumentation happened.
2128 If the current instruction is a function call that
2129 might free something, let's forget about the memory
2130 references that got instrumented. Otherwise we might
2131 miss some instrumentation opportunities. */
2132 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2133 empty_mem_ref_hash_table ();
2135 gsi_next (&i);
2139 free_mem_ref_resources ();
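/* E.g. in a diamond-shaped CFG A -> {B, C} -> D the table survives
   from A into B (B's single predecessor A was the last processed
   block), but is flushed on entry to C and to D, which do not extend
   the current extended basic block.  */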
2142 /* Build
2143 __asan_before_dynamic_init (module_name)
2144 or
2145 __asan_after_dynamic_init ()
2146 call. */
2148 tree
2149 asan_dynamic_init_call (bool after_p)
2151 tree fn = builtin_decl_implicit (after_p
2152 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2153 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2154 tree module_name_cst = NULL_TREE;
2155 if (!after_p)
2157 pretty_printer module_name_pp;
2158 pp_string (&module_name_pp, main_input_filename);
2160 if (shadow_ptr_types[0] == NULL_TREE)
2161 asan_init_shadow_ptr_types ();
2162 module_name_cst = asan_pp_string (&module_name_pp);
2163 module_name_cst = fold_convert (const_ptr_type_node,
2164 module_name_cst);
2167 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
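/* That is, the returned tree is either the call
   '__asan_before_dynamic_init ("<main_input_filename>")' when AFTER_P
   is false, or the zero-argument call '__asan_after_dynamic_init ()'
   when AFTER_P is true.  */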
2170 /* Build
2171 struct __asan_global
2173 const void *__beg;
2174 uptr __size;
2175 uptr __size_with_redzone;
2176 const void *__name;
2177 const void *__module_name;
2178 uptr __has_dynamic_init;
2179 __asan_global_source_location *__location;
2180 } type. */
2182 static tree
2183 asan_global_struct (void)
2185 static const char *field_names[7]
2186 = { "__beg", "__size", "__size_with_redzone",
2187 "__name", "__module_name", "__has_dynamic_init", "__location"};
2188 tree fields[7], ret;
2189 int i;
2191 ret = make_node (RECORD_TYPE);
2192 for (i = 0; i < 7; i++)
2194 fields[i]
2195 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2196 get_identifier (field_names[i]),
2197 (i == 0 || i == 3) ? const_ptr_type_node
2198 : pointer_sized_int_node);
2199 DECL_CONTEXT (fields[i]) = ret;
2200 if (i)
2201 DECL_CHAIN (fields[i - 1]) = fields[i];
2203 tree type_decl = build_decl (input_location, TYPE_DECL,
2204 get_identifier ("__asan_global"), ret);
2205 DECL_IGNORED_P (type_decl) = 1;
2206 DECL_ARTIFICIAL (type_decl) = 1;
2207 TYPE_FIELDS (ret) = fields[0];
2208 TYPE_NAME (ret) = type_decl;
2209 TYPE_STUB_DECL (ret) = type_decl;
2210 layout_type (ret);
2211 return ret;
2214 /* Append description of a single global DECL into vector V.
2215 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2217 static void
2218 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2220 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2221 unsigned HOST_WIDE_INT size;
2222 tree str_cst, module_name_cst, refdecl = decl;
2223 vec<constructor_elt, va_gc> *vinner = NULL;
2225 pretty_printer asan_pp, module_name_pp;
2227 if (DECL_NAME (decl))
2228 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2229 else
2230 pp_string (&asan_pp, "<unknown>");
2231 str_cst = asan_pp_string (&asan_pp);
2233 pp_string (&module_name_pp, main_input_filename);
2234 module_name_cst = asan_pp_string (&module_name_pp);
2236 if (asan_needs_local_alias (decl))
2238 char buf[20];
2239 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2240 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2241 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2242 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2243 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2244 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2245 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2246 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2247 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2248 TREE_STATIC (refdecl) = 1;
2249 TREE_PUBLIC (refdecl) = 0;
2250 TREE_USED (refdecl) = 1;
2251 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2254 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2255 fold_convert (const_ptr_type_node,
2256 build_fold_addr_expr (refdecl)));
2257 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2258 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2259 size += asan_red_zone_size (size);
2260 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2261 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2262 fold_convert (const_ptr_type_node, str_cst));
2263 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2264 fold_convert (const_ptr_type_node, module_name_cst));
2265 varpool_node *vnode = varpool_node::get (decl);
2266 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2267 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2268 build_int_cst (uptr, has_dynamic_init));
2269 tree locptr = NULL_TREE;
2270 location_t loc = DECL_SOURCE_LOCATION (decl);
2271 expanded_location xloc = expand_location (loc);
2272 if (xloc.file != NULL)
2274 static int lasanloccnt = 0;
2275 char buf[25];
2276 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2277 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2278 ubsan_get_source_location_type ());
2279 TREE_STATIC (var) = 1;
2280 TREE_PUBLIC (var) = 0;
2281 DECL_ARTIFICIAL (var) = 1;
2282 DECL_IGNORED_P (var) = 1;
2283 pretty_printer filename_pp;
2284 pp_string (&filename_pp, xloc.file);
2285 tree str = asan_pp_string (&filename_pp);
2286 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2287 NULL_TREE, str, NULL_TREE,
2288 build_int_cst (unsigned_type_node,
2289 xloc.line), NULL_TREE,
2290 build_int_cst (unsigned_type_node,
2291 xloc.column));
2292 TREE_CONSTANT (ctor) = 1;
2293 TREE_STATIC (ctor) = 1;
2294 DECL_INITIAL (var) = ctor;
2295 varpool_node::finalize_decl (var);
2296 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2298 else
2299 locptr = build_int_cst (uptr, 0);
2300 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2301 init = build_constructor (type, vinner);
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
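/* As a sketch, a global 'int g;' defined in file.c gets a descriptor
   roughly like

     { &g, 4, 4 + rz, "g", "file.c", 0, &.LASANLOC1 }

   appended to V, where rz is asan_red_zone_size (4) and .LASANLOC1 is
   the static {"file.c", line, column} location record built above.  */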
2305 /* Initialize sanitizer.def builtins if the front end hasn't initialized them. */
2306 void
2307 initialize_sanitizer_builtins (void)
2309 tree decl;
2311 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2312 return;
2314 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2315 tree BT_FN_VOID_PTR
2316 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2317 tree BT_FN_VOID_CONST_PTR
2318 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2319 tree BT_FN_VOID_PTR_PTR
2320 = build_function_type_list (void_type_node, ptr_type_node,
2321 ptr_type_node, NULL_TREE);
2322 tree BT_FN_VOID_PTR_PTR_PTR
2323 = build_function_type_list (void_type_node, ptr_type_node,
2324 ptr_type_node, ptr_type_node, NULL_TREE);
2325 tree BT_FN_VOID_PTR_PTRMODE
2326 = build_function_type_list (void_type_node, ptr_type_node,
2327 pointer_sized_int_node, NULL_TREE);
2328 tree BT_FN_VOID_INT
2329 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2330 tree BT_FN_SIZE_CONST_PTR_INT
2331 = build_function_type_list (size_type_node, const_ptr_type_node,
2332 integer_type_node, NULL_TREE);
2333 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2334 tree BT_FN_IX_CONST_VPTR_INT[5];
2335 tree BT_FN_IX_VPTR_IX_INT[5];
2336 tree BT_FN_VOID_VPTR_IX_INT[5];
2337 tree vptr
2338 = build_pointer_type (build_qualified_type (void_type_node,
2339 TYPE_QUAL_VOLATILE));
2340 tree cvptr
2341 = build_pointer_type (build_qualified_type (void_type_node,
2342 TYPE_QUAL_VOLATILE
2343 |TYPE_QUAL_CONST));
2344 tree boolt
2345 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2346 int i;
2347 for (i = 0; i < 5; i++)
2349 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2350 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2351 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2352 integer_type_node, integer_type_node,
2353 NULL_TREE);
2354 BT_FN_IX_CONST_VPTR_INT[i]
2355 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2356 BT_FN_IX_VPTR_IX_INT[i]
2357 = build_function_type_list (ix, vptr, ix, integer_type_node,
2358 NULL_TREE);
2359 BT_FN_VOID_VPTR_IX_INT[i]
2360 = build_function_type_list (void_type_node, vptr, ix,
2361 integer_type_node, NULL_TREE);
2363 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2364 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2365 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2366 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2367 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2368 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2369 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2370 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2371 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2372 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2373 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2374 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2375 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2376 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2377 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2378 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2379 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2380 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2381 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2382 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2383 #undef ATTR_NOTHROW_LEAF_LIST
2384 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2385 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2386 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2387 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2388 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2389 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2390 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2391 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2392 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2393 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2394 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2395 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2396 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2397 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2398 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2399 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2400 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2401 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2402 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2403 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2404 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2405 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2406 #undef DEF_SANITIZER_BUILTIN
2407 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2408 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2409 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2410 set_call_expr_flags (decl, ATTRS); \
2411 set_builtin_decl (ENUM, decl, true);
2413 #include "sanitizer.def"
2415 /* -fsanitize=object-size uses __builtin_object_size, but that might
2416 not be available for e.g. Fortran at this point. We use
2417 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2418 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2419 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2420 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2421 BT_FN_SIZE_CONST_PTR_INT,
2422 ATTR_PURE_NOTHROW_LEAF_LIST)
2424 #undef DEF_SANITIZER_BUILTIN
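/* Each DEF_SANITIZER_BUILTIN expansion above registers one run-time
   entry point under a "__builtin_" alias; e.g. the __asan_report_load4
   used by asan_expand_check_ifn below is expected to be declared in
   sanitizer.def roughly with type BT_FN_VOID_PTR and
   ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST.  */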
2427 /* Called via hash_table::traverse. Count the number of emitted
2428 STRING_CSTs in the constant hash table. */
2430 static int
2431 count_string_csts (constant_descriptor_tree **slot,
2432 unsigned HOST_WIDE_INT *data)
2434 struct constant_descriptor_tree *desc = *slot;
2435 if (TREE_CODE (desc->value) == STRING_CST
2436 && TREE_ASM_WRITTEN (desc->value)
2437 && asan_protect_global (desc->value))
2438 ++*data;
2439 return 1;
2442 /* Helper structure to pass two parameters to
2443 add_string_csts. */
2445 struct asan_add_string_csts_data
2447 tree type;
2448 vec<constructor_elt, va_gc> *v;
2451 /* Called via hash_table::traverse. Call asan_add_global
2452 on emitted STRING_CSTs from the constant hash table. */
2454 static int
2455 add_string_csts (constant_descriptor_tree **slot,
2456 asan_add_string_csts_data *aascd)
2458 struct constant_descriptor_tree *desc = *slot;
2459 if (TREE_CODE (desc->value) == STRING_CST
2460 && TREE_ASM_WRITTEN (desc->value)
2461 && asan_protect_global (desc->value))
2463 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2464 aascd->type, aascd->v);
2466 return 1;
2469 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2470 invoke ggc_collect. */
2471 static GTY(()) tree asan_ctor_statements;
2473 /* Module-level instrumentation.
2474 - Insert __asan_init_vN() into the list of CTORs.
2475 - TODO: insert redzones around globals.
2478 void
2479 asan_finish_file (void)
2481 varpool_node *vnode;
2482 unsigned HOST_WIDE_INT gcount = 0;
2484 if (shadow_ptr_types[0] == NULL_TREE)
2485 asan_init_shadow_ptr_types ();
2486 /* Avoid instrumenting code in the asan ctors/dtors.
2487 We don't need to insert padding after the description strings,
2488 nor after the .LASAN* array. */
2489 flag_sanitize &= ~SANITIZE_ADDRESS;
2491 /* For user-space we want asan constructors to run first.
2492 The Linux kernel does not support priorities other than default,
2493 and the only other user of constructors is coverage. So we run
2494 with the default priority. */
2495 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2496 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2498 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2500 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2501 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2503 FOR_EACH_DEFINED_VARIABLE (vnode)
2504 if (TREE_ASM_WRITTEN (vnode->decl)
2505 && asan_protect_global (vnode->decl))
2506 ++gcount;
2507 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2508 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2509 (&gcount);
2510 if (gcount)
2512 tree type = asan_global_struct (), var, ctor;
2513 tree dtor_statements = NULL_TREE;
2514 vec<constructor_elt, va_gc> *v;
2515 char buf[20];
2517 type = build_array_type_nelts (type, gcount);
2518 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2519 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2520 type);
2521 TREE_STATIC (var) = 1;
2522 TREE_PUBLIC (var) = 0;
2523 DECL_ARTIFICIAL (var) = 1;
2524 DECL_IGNORED_P (var) = 1;
2525 vec_alloc (v, gcount);
2526 FOR_EACH_DEFINED_VARIABLE (vnode)
2527 if (TREE_ASM_WRITTEN (vnode->decl)
2528 && asan_protect_global (vnode->decl))
2529 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2530 struct asan_add_string_csts_data aascd;
2531 aascd.type = TREE_TYPE (type);
2532 aascd.v = v;
2533 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2534 (&aascd);
2535 ctor = build_constructor (type, v);
2536 TREE_CONSTANT (ctor) = 1;
2537 TREE_STATIC (ctor) = 1;
2538 DECL_INITIAL (var) = ctor;
2539 varpool_node::finalize_decl (var);
2541 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2542 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2543 append_to_statement_list (build_call_expr (fn, 2,
2544 build_fold_addr_expr (var),
2545 gcount_tree),
2546 &asan_ctor_statements);
2548 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2549 append_to_statement_list (build_call_expr (fn, 2,
2550 build_fold_addr_expr (var),
2551 gcount_tree),
2552 &dtor_statements);
2553 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2555 if (asan_ctor_statements)
2556 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2557 flag_sanitize |= SANITIZE_ADDRESS;
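/* Sketch of the net result for user-space ASan: the emitted 'I'
   constructor calls __asan_init_vN () and then roughly
   __asan_register_globals (&.LASAN0, gcount), while the matching 'D'
   destructor calls __asan_unregister_globals with the same
   arguments.  */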
2560 /* Expand the IFN_ASAN_CHECK internal call into an inline shadow memory check, or into a run-time callback if USE_CALLS. */
2562 bool
2563 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2565 gimple g = gsi_stmt (*iter);
2566 location_t loc = gimple_location (g);
2568 bool recover_p
2569 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2571 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2572 gcc_assert (flags < ASAN_CHECK_LAST);
2573 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2574 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2575 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2577 tree base = gimple_call_arg (g, 1);
2578 tree len = gimple_call_arg (g, 2);
2579 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2581 HOST_WIDE_INT size_in_bytes
2582 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2584 if (use_calls)
2586 /* Instrument using callbacks. */
2587 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2588 NOP_EXPR, base);
2589 gimple_set_location (g, loc);
2590 gsi_insert_before (iter, g, GSI_SAME_STMT);
2591 tree base_addr = gimple_assign_lhs (g);
2593 int nargs;
2594 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2595 if (nargs == 1)
2596 g = gimple_build_call (fun, 1, base_addr);
2597 else
2599 gcc_assert (nargs == 2);
2600 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2601 NOP_EXPR, len);
2602 gimple_set_location (g, loc);
2603 gsi_insert_before (iter, g, GSI_SAME_STMT);
2604 tree sz_arg = gimple_assign_lhs (g);
2605 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2607 gimple_set_location (g, loc);
2608 gsi_replace (iter, g, false);
2609 return false;
2612 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2614 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2615 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2617 gimple_stmt_iterator gsi = *iter;
2619 if (!is_non_zero_len)
2621 /* So, the length of the memory area to asan-protect is
2622 non-constant. Let's guard the generated instrumentation code
2623 like:
2625 if (len != 0)
2627 // asan instrumentation code goes here.
2629 // fallthrough instructions, starting with *ITER. */
2631 g = gimple_build_cond (NE_EXPR,
2632 len,
2633 build_int_cst (TREE_TYPE (len), 0),
2634 NULL_TREE, NULL_TREE);
2635 gimple_set_location (g, loc);
2637 basic_block then_bb, fallthrough_bb;
2638 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2639 /*then_more_likely_p=*/true,
2640 &then_bb, &fallthrough_bb);
2641 /* Note that fallthrough_bb starts with the statement that was
2642 pointed to by ITER. */
2644 /* The 'then block' of the 'if (len != 0)' condition is where
2645 we'll generate the asan instrumentation code now. */
2646 gsi = gsi_last_bb (then_bb);
2649 /* Get an iterator on the point where we can add the condition
2650 statement for the instrumentation. */
2651 basic_block then_bb, else_bb;
2652 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2653 /*then_more_likely_p=*/false,
2654 /*create_then_fallthru_edge*/recover_p,
2655 &then_bb,
2656 &else_bb);
2658 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2659 NOP_EXPR, base);
2660 gimple_set_location (g, loc);
2661 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2662 tree base_addr = gimple_assign_lhs (g);
2664 tree t = NULL_TREE;
2665 if (real_size_in_bytes >= 8)
2667 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2668 shadow_ptr_type);
2669 t = shadow;
2671 else
2673 /* Slow path for 1, 2 and 4 byte accesses. */
2674 /* Test (shadow != 0)
2675 & (((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2676 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2677 shadow_ptr_type);
2678 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2679 gimple_seq seq = NULL;
2680 gimple_seq_add_stmt (&seq, shadow_test);
2681 /* Aligned accesses (>= 8 bytes) can test just
2682 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2683 to be 0. */
2684 if (align < 8)
2686 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2687 base_addr, 7));
2688 gimple_seq_add_stmt (&seq,
2689 build_type_cast (shadow_type,
2690 gimple_seq_last (seq)));
2691 if (real_size_in_bytes > 1)
2692 gimple_seq_add_stmt (&seq,
2693 build_assign (PLUS_EXPR,
2694 gimple_seq_last (seq),
2695 real_size_in_bytes - 1));
2696 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2698 else
2699 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2700 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2701 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2702 gimple_seq_last (seq)));
2703 t = gimple_assign_lhs (gimple_seq_last (seq));
2704 gimple_seq_set_location (seq, loc);
2705 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2707 /* For non-constant, misaligned or otherwise weird access sizes,
2708 check first and last byte. */
2709 if (size_in_bytes == -1)
2711 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2712 MINUS_EXPR, len,
2713 build_int_cst (pointer_sized_int_node, 1));
2714 gimple_set_location (g, loc);
2715 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2716 tree last = gimple_assign_lhs (g);
2717 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2718 PLUS_EXPR, base_addr, last);
2719 gimple_set_location (g, loc);
2720 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2721 tree base_end_addr = gimple_assign_lhs (g);
2723 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2724 shadow_ptr_type);
2725 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2726 gimple_seq seq = NULL;
2727 gimple_seq_add_stmt (&seq, shadow_test);
2728 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2729 base_end_addr, 7));
2730 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2731 gimple_seq_last (seq)));
2732 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2733 gimple_seq_last (seq),
2734 shadow));
2735 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2736 gimple_seq_last (seq)));
2737 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2738 gimple_seq_last (seq)));
2739 t = gimple_assign_lhs (gimple_seq_last (seq));
2740 gimple_seq_set_location (seq, loc);
2741 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2745 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2746 NULL_TREE, NULL_TREE);
2747 gimple_set_location (g, loc);
2748 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2750 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2751 gsi = gsi_start_bb (then_bb);
2752 int nargs;
2753 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2754 g = gimple_build_call (fun, nargs, base_addr, len);
2755 gimple_set_location (g, loc);
2756 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2758 gsi_remove (iter, true);
2759 *iter = gsi_start_bb (else_bb);
2761 return true;
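/* Sketch of the inline expansion for a 4-byte load with
   use_calls == false:

     shadow = *(char *) ((base_addr >> 3) + shadow_offset);
     if (shadow != 0 && ((base_addr & 7) + 3) >= shadow)
       __asan_report_load4 (base_addr);

   with use_calls == true the whole check instead collapses into a
   single '__asan_load4 (base_addr);' callback.  */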
2764 /* Instrument the current function. */
2766 static unsigned int
2767 asan_instrument (void)
2769 if (shadow_ptr_types[0] == NULL_TREE)
2770 asan_init_shadow_ptr_types ();
2771 transform_statements ();
2772 return 0;
2775 static bool
2776 gate_asan (void)
2778 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2779 && !lookup_attribute ("no_sanitize_address",
2780 DECL_ATTRIBUTES (current_function_decl));
2783 namespace {
2785 const pass_data pass_data_asan =
2787 GIMPLE_PASS, /* type */
2788 "asan", /* name */
2789 OPTGROUP_NONE, /* optinfo_flags */
2790 TV_NONE, /* tv_id */
2791 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2792 0, /* properties_provided */
2793 0, /* properties_destroyed */
2794 0, /* todo_flags_start */
2795 TODO_update_ssa, /* todo_flags_finish */
2798 class pass_asan : public gimple_opt_pass
2800 public:
2801 pass_asan (gcc::context *ctxt)
2802 : gimple_opt_pass (pass_data_asan, ctxt)
2805 /* opt_pass methods: */
2806 opt_pass * clone () { return new pass_asan (m_ctxt); }
2807 virtual bool gate (function *) { return gate_asan (); }
2808 virtual unsigned int execute (function *) { return asan_instrument (); }
2810 }; // class pass_asan
2812 } // anon namespace
2814 gimple_opt_pass *
2815 make_pass_asan (gcc::context *ctxt)
2817 return new pass_asan (ctxt);
2820 namespace {
2822 const pass_data pass_data_asan_O0 =
2824 GIMPLE_PASS, /* type */
2825 "asan0", /* name */
2826 OPTGROUP_NONE, /* optinfo_flags */
2827 TV_NONE, /* tv_id */
2828 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2829 0, /* properties_provided */
2830 0, /* properties_destroyed */
2831 0, /* todo_flags_start */
2832 TODO_update_ssa, /* todo_flags_finish */
2835 class pass_asan_O0 : public gimple_opt_pass
2837 public:
2838 pass_asan_O0 (gcc::context *ctxt)
2839 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2842 /* opt_pass methods: */
2843 virtual bool gate (function *) { return !optimize && gate_asan (); }
2844 virtual unsigned int execute (function *) { return asan_instrument (); }
2846 }; // class pass_asan_O0
2848 } // anon namespace
2850 gimple_opt_pass *
2851 make_pass_asan_O0 (gcc::context *ctxt)
2853 return new pass_asan_O0 (ctxt);
2856 #include "gt-asan.h"