/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2016 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "asan.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
#include "fnmatch.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
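
     As a concrete illustration, suppose the 8-byte granule containing X
     has ShadowValue 4, i.e. only its first 4 bytes are addressable.  A
     2-byte load at X with (X & 7) == 3 touches offsets 3 and 4, and
     (3 + 2 - 1) >= 4 holds, so __asan_report_load2(X) is called; a
     1-byte load at the same X gives (3 + 1 - 1) >= 4, which is false,
     so no report is issued.
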
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that red zones are inserted
   around the allocated memory) and free (so that reuse of freed memory is
   delayed), and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
           the next slot be 32-byte aligned; this one is called Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
      0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
      parsed at runtime by the runtime asan library), whose format is
      the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

      where '(...){n}' means the content inside the parenthesis occurs 'n'
      times, with 'n' being the number of variables on the stack.

   3/ The following 8 bytes contain the PC of the current function which
      will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.
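
      With the foo example above, this string could plausibly read
      "foo 2 32 8 96 23 ": two variables, one at 32-byte-aligned offset
      32 with length 8 (that would be 'b') and one at offset 96 with
      length 23 (that would be 'a').  The exact offsets and ordering
      here are illustrative only.
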
   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') is addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 23 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so that a red zone is inserted
   between them.  The red zones are sized so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

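/* Conceptually, for a translation unit with one protected global, the
   compiler therefore emits something along these lines (a hedged sketch,
   not the literal output; the array and constructor names are made up
   for illustration):

     static struct __asan_global __asan_globals[1]
       = { { &g, 4, 32, "g", "tu.c", 0, NULL } };
     static void __asan_ctor (void)
     { __asan_register_globals (__asan_globals, 1); }
     static void __asan_dtor (void)
     { __asan_unregister_globals (__asan_globals, 1); }  */
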
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}

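/* For example, a command-line option such as
   -fsanitize-sections=.mysec*,.other (hypothetical section names) would
   leave the two glob patterns ".mysec*" and ".other" in
   sanitized_sections, to be matched with fnmatch by
   section_sanitized_p below.  */
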
/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

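/* For instance, on x86_64 GNU/Linux the target hook returns the
   well-known constant 0x7fff8000, so the shadow byte for address A
   lives at (A >> 3) + 0x7fff8000.  */
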
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and has length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
	 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

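/* E.g. for the gimple assignment "*p_1 = x_2;" (SSA names illustrative),
   the code above sets REF->start to the "*p_1" expression, sets
   *REF_IS_STORE to true, and records the byte size of the stored type
   as REF->access_size.  */
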
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:

      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other builtins' memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

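/* For instance, for the call "memcpy (d, s, n)" this function sets *DST
   to {d, 1} with *DST_LEN = n and *DST_IS_STORE = true, and *SRC0 to
   {s, 1} with *SRC0_LEN = n and *SRC0_IS_STORE = false, so both the
   destination and the source region get instrumented.  */
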
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}

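/* E.g. on a little-endian target, shadow_bytes {0x00, 0x00, 0x00, 0xF1}
   yields the SImode constant 0xF1000000: byte i is shifted left by
   8 * i, matching the memory order of the four shadow bytes.  */
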
/* Clear LEN bytes of shadow memory at SHADOW_MEM.  We can't emit a
   library call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}

/* Emit the internal LASANPC label for the current function; the stack
   frame description built in asan_emit_stack_protection below stores
   its address so the run-time library can report the function's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the OFFSETS array offsets are computed.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or the corresponding address based on the
   __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
	 and NULL otherwise.  Check RET value is NULL here and jump over the
	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
      int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_unlikely);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: the Linux kernel is built with -fno-common, so we do
	 instrument globals there even in C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect vars placed in a user section; such vars coming
	 from multiple TUs are often assumed to form an array, and
	 putting padding in there breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

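/* For instance, check_func (/*is_store=*/true, /*recover_p=*/false, 4,
   &nargs) returns the decl for BUILT_IN_ASAN_STORE4 and sets nargs to 1,
   since exact_log2 (4) == 2 indexes the third column of the table
   above.  */
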
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

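/* The emitted gimple therefore looks roughly like this, with SSA names
   illustrative and the shift/offset values being the typical x86_64
   defaults:

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (signed char *) _2;
     _4 = *_3;

   _4, the loaded shadow byte, is what gets returned to the caller.  */
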
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				   TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

1665 /* Instrument the memory access instruction BASE. Insert new
1666 statements before or after ITER.
1668 Note that the memory access represented by BASE can be either an
1669 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1670 location. IS_STORE is TRUE for a store, FALSE for a load.
1671 BEFORE_P is TRUE for inserting the instrumentation code before
1672 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1673 for a scalar memory access and FALSE for memory region access.
1674 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1675 length. ALIGN tells alignment of accessed memory object.
1677 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1678 memory region have already been instrumented.
1680 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1681 statement it was pointing to prior to calling this function,
1682 otherwise, it points to the statement logically following it. */
1684 static void
1685 build_check_stmt (location_t loc, tree base, tree len,
1686 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1687 bool is_non_zero_len, bool before_p, bool is_store,
1688 bool is_scalar_access, unsigned int align = 0)
1690 gimple_stmt_iterator gsi = *iter;
1691 gimple *g;
1693 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1695 gsi = *iter;
1697 base = unshare_expr (base);
1698 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1700 if (len)
1702 len = unshare_expr (len);
1703 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1705 else
1707 gcc_assert (size_in_bytes != -1);
1708 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1711 if (size_in_bytes > 1)
1713 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1714 || size_in_bytes > 16)
1715 is_scalar_access = false;
1716 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1718 /* On non-strict alignment targets, if
1719 16-byte access is just 8-byte aligned,
1720 this will result in misaligned shadow
1721 memory 2 byte load, but otherwise can
1722 be handled using one read. */
1723 if (size_in_bytes != 16
1724 || STRICT_ALIGNMENT
1725 || align < 8 * BITS_PER_UNIT)
1726 is_scalar_access = false;
1730 HOST_WIDE_INT flags = 0;
1731 if (is_store)
1732 flags |= ASAN_CHECK_STORE;
1733 if (is_non_zero_len)
1734 flags |= ASAN_CHECK_NON_ZERO_LEN;
1735 if (is_scalar_access)
1736 flags |= ASAN_CHECK_SCALAR_ACCESS;
1738 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1739 build_int_cst (integer_type_node, flags),
1740 base, len,
1741 build_int_cst (integer_type_node,
1742 align / BITS_PER_UNIT));
1743 gimple_set_location (g, loc);
1744 if (before_p)
1745 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1746 else
1748 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1749 gsi_next (&gsi);
1750 *iter = gsi;
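/* Example (illustrative sketch; the variable names are made up): for a
   4-byte scalar store such as `*p = 1;', a call

     build_check_stmt (loc, p, NULL_TREE, 4, &gsi,
                       /*is_non_zero_len=*/true, /*before_p=*/true,
                       /*is_store=*/true, /*is_scalar_access=*/true,
                       /*align=*/32);

   emits, before the store, an internal call of the form

     ASAN_CHECK (flags, p, 4, 4);

   where flags is ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
   | ASAN_CHECK_SCALAR_ACCESS and the last operand is ALIGN in bytes.
   The internal call is expanded later by asan_expand_check_ifn.  */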
1754 /* If T represents a memory access, add instrumentation code before ITER.
1755 LOCATION is source code location.
1756 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1758 static void
1759 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1760 location_t location, bool is_store)
1762 if (is_store && !ASAN_INSTRUMENT_WRITES)
1763 return;
1764 if (!is_store && !ASAN_INSTRUMENT_READS)
1765 return;
1767 tree type, base;
1768 HOST_WIDE_INT size_in_bytes;
1770 type = TREE_TYPE (t);
1771 switch (TREE_CODE (t))
1773 case ARRAY_REF:
1774 case COMPONENT_REF:
1775 case INDIRECT_REF:
1776 case MEM_REF:
1777 case VAR_DECL:
1778 case BIT_FIELD_REF:
1779 break;
1781 default:
1782 return;
1785 size_in_bytes = int_size_in_bytes (type);
1786 if (size_in_bytes <= 0)
1787 return;
1789 HOST_WIDE_INT bitsize, bitpos;
1790 tree offset;
1791 machine_mode mode;
1792 int unsignedp, reversep, volatilep = 0;
1793 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1794 &unsignedp, &reversep, &volatilep, false);
1796 if (TREE_CODE (t) == COMPONENT_REF
1797 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1799 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1800 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1801 TREE_OPERAND (t, 0), repr,
1802 NULL_TREE), location, is_store);
1803 return;
1806 if (bitpos % BITS_PER_UNIT
1807 || bitsize != size_in_bytes * BITS_PER_UNIT)
1808 return;
1810 if (TREE_CODE (inner) == VAR_DECL
1811 && offset == NULL_TREE
1812 && bitpos >= 0
1813 && DECL_SIZE (inner)
1814 && tree_fits_shwi_p (DECL_SIZE (inner))
1815 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1817 if (DECL_THREAD_LOCAL_P (inner))
1818 return;
1819 if (!ASAN_GLOBALS && is_global_var (inner))
1820 return;
1821 if (!TREE_STATIC (inner))
1823 /* Automatic vars in the current function will always be
1824 accessible. */
1825 if (decl_function_context (inner) == current_function_decl)
1826 return;
1828 /* Always instrument external vars; they might be dynamically
1829 initialized. */
1830 else if (!DECL_EXTERNAL (inner))
1832 /* Static vars, if known not to be dynamically initialized,
1833 will always be accessible. */
1834 varpool_node *vnode = varpool_node::get (inner);
1835 if (vnode && !vnode->dynamically_initialized)
1836 return;
1840 base = build_fold_addr_expr (t);
1841 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1843 unsigned int align = get_object_alignment (t);
1844 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1845 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1846 is_store, /*is_scalar_access*/true, align);
1847 update_mem_ref_hash_table (base, size_in_bytes);
1848 update_mem_ref_hash_table (t, size_in_bytes);
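/* Example (illustrative sketch): for

     struct S { int x : 3; int y : 5; } s;
     s.y = 2;

   the store to the bit-field `y' cannot be checked on its own, since
   it is not a whole-byte access; the DECL_BIT_FIELD_REPRESENTATIVE
   case above therefore re-instruments the access as a reference to
   the representative field covering `y', whose size and position are
   whole bytes.  */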
1853 /* Insert a memory reference into the hash table if the access
1854 length can be determined at compile time. */
1856 static void
1857 maybe_update_mem_ref_hash_table (tree base, tree len)
1859 if (!POINTER_TYPE_P (TREE_TYPE (base))
1860 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1861 return;
1863 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1865 if (size_in_bytes != -1)
1866 update_mem_ref_hash_table (base, size_in_bytes);
1869 /* Instrument an access to a contiguous memory region that starts at
1870 the address pointed to by BASE, over a length of LEN (expressed
1871 in units of sizeof (*BASE)). ITER points to the instruction before
1872 which the instrumentation instructions must be inserted. LOCATION
1873 is the source location that the instrumentation instructions must
1874 have. If IS_STORE is true, then the memory access is a store;
1875 otherwise, it's a load. */
1877 static void
1878 instrument_mem_region_access (tree base, tree len,
1879 gimple_stmt_iterator *iter,
1880 location_t location, bool is_store)
1882 if (!POINTER_TYPE_P (TREE_TYPE (base))
1883 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1884 || integer_zerop (len))
1885 return;
1887 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1889 if ((size_in_bytes == -1)
1890 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1892 build_check_stmt (location, base, len, size_in_bytes, iter,
1893 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1894 is_store, /*is_scalar_access*/false, /*align*/0);
1897 maybe_update_mem_ref_hash_table (base, len);
1898 *iter = gsi_for_stmt (gsi_stmt (*iter));
1901 /* Instrument the call to a built-in memory access function that is
1902 pointed to by the iterator ITER.
1904 Upon completion, return TRUE iff *ITER has been advanced to the
1905 statement following the one it was originally pointing to. */
1907 static bool
1908 instrument_builtin_call (gimple_stmt_iterator *iter)
1910 if (!ASAN_MEMINTRIN)
1911 return false;
1913 bool iter_advanced_p = false;
1914 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1916 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1918 location_t loc = gimple_location (call);
1920 asan_mem_ref src0, src1, dest;
1921 asan_mem_ref_init (&src0, NULL, 1);
1922 asan_mem_ref_init (&src1, NULL, 1);
1923 asan_mem_ref_init (&dest, NULL, 1);
1925 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1926 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1927 dest_is_deref = false, intercepted_p = true;
1929 if (get_mem_refs_of_builtin_call (call,
1930 &src0, &src0_len, &src0_is_store,
1931 &src1, &src1_len, &src1_is_store,
1932 &dest, &dest_len, &dest_is_store,
1933 &dest_is_deref, &intercepted_p))
1935 if (dest_is_deref)
1937 instrument_derefs (iter, dest.start, loc, dest_is_store);
1938 gsi_next (iter);
1939 iter_advanced_p = true;
1941 else if (!intercepted_p
1942 && (src0_len || src1_len || dest_len))
1944 if (src0.start != NULL_TREE)
1945 instrument_mem_region_access (src0.start, src0_len,
1946 iter, loc, /*is_store=*/false);
1947 if (src1.start != NULL_TREE)
1948 instrument_mem_region_access (src1.start, src1_len,
1949 iter, loc, /*is_store=*/false);
1950 if (dest.start != NULL_TREE)
1951 instrument_mem_region_access (dest.start, dest_len,
1952 iter, loc, /*is_store=*/true);
1954 *iter = gsi_for_stmt (call);
1955 gsi_next (iter);
1956 iter_advanced_p = true;
1958 else
1960 if (src0.start != NULL_TREE)
1961 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1962 if (src1.start != NULL_TREE)
1963 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1964 if (dest.start != NULL_TREE)
1965 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1968 return iter_advanced_p;
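/* Example (illustrative sketch): for

     __builtin_memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports SRC0 = s of length N (a load)
   and DEST = d of length N (a store).  For builtins that the run-time
   library does not intercept, two region checks are emitted before
   the call; for intercepted ones such as memcpy the library's
   interceptor does the checking itself, so only the mem-ref hash
   table is updated here.  */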
1971 /* Instrument the assignment statement ITER if it is subject to
1972 instrumentation. Return TRUE iff instrumentation actually
1973 happened. In that case, the iterator ITER is advanced to the
1974 statement logically following the one initially pointed to by ITER,
1975 and the memory reference whose access has been instrumented is
1976 added to the memory references hash table. */
1978 static bool
1979 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1981 gimple *s = gsi_stmt (*iter);
1983 gcc_assert (gimple_assign_single_p (s));
1985 tree ref_expr = NULL_TREE;
1986 bool is_store, is_instrumented = false;
1988 if (gimple_store_p (s))
1990 ref_expr = gimple_assign_lhs (s);
1991 is_store = true;
1992 instrument_derefs (iter, ref_expr,
1993 gimple_location (s),
1994 is_store);
1995 is_instrumented = true;
1998 if (gimple_assign_load_p (s))
2000 ref_expr = gimple_assign_rhs1 (s);
2001 is_store = false;
2002 instrument_derefs (iter, ref_expr,
2003 gimple_location (s),
2004 is_store);
2005 is_instrumented = true;
2008 if (is_instrumented)
2009 gsi_next (iter);
2011 return is_instrumented;
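/* Example (illustrative sketch): for an aggregate copy

     *p = *q;

   gimple_store_p and gimple_assign_load_p are both true, so both the
   store through `p' and the load through `q' are instrumented, and
   ITER is advanced past the assignment.  */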
2014 /* Instrument the function call pointed to by the iterator ITER, if it
2015 is subject to instrumentation. At the moment, the only function
2016 calls that are instrumented are some built-in functions that access
2017 memory. Look at instrument_builtin_call to learn more.
2019 Upon completion return TRUE iff *ITER was advanced to the statement
2020 following the one it was originally pointing to. */
2022 static bool
2023 maybe_instrument_call (gimple_stmt_iterator *iter)
2025 gimple *stmt = gsi_stmt (*iter);
2026 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2028 if (is_builtin && instrument_builtin_call (iter))
2029 return true;
2031 if (gimple_call_noreturn_p (stmt))
2033 if (is_builtin)
2035 tree callee = gimple_call_fndecl (stmt);
2036 switch (DECL_FUNCTION_CODE (callee))
2038 case BUILT_IN_UNREACHABLE:
2039 case BUILT_IN_TRAP:
2040 /* Don't instrument these. */
2041 return false;
2042 default:
2043 break;
2046 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2047 gimple *g = gimple_build_call (decl, 0);
2048 gimple_set_location (g, gimple_location (stmt));
2049 gsi_insert_before (iter, g, GSI_SAME_STMT);
2052 if (gimple_store_p (stmt))
2054 tree ref_expr = gimple_call_lhs (stmt);
2055 instrument_derefs (iter, ref_expr,
2056 gimple_location (stmt),
2057 /*is_store=*/true);
2059 gsi_next (iter);
2060 return true;
2063 return false;
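/* Example (illustrative sketch): before a noreturn call such as

     abort ();

   a call to __asan_handle_no_return () is inserted so that the
   run-time library can unpoison the whole stack; otherwise the
   redzones of frames that are never normally unwound would stay
   poisoned and cause false positives later.  */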
2066 /* Walk each instruction of every basic block and instrument those that
2067 represent memory references: loads, stores, or function calls.
2068 In a given basic block, this function avoids instrumenting memory
2069 references that have already been instrumented. */
2071 static void
2072 transform_statements (void)
2074 basic_block bb, last_bb = NULL;
2075 gimple_stmt_iterator i;
2076 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2078 FOR_EACH_BB_FN (bb, cfun)
2080 basic_block prev_bb = bb;
2082 if (bb->index >= saved_last_basic_block) continue;
2084 /* Flush the mem ref hash table, if the current bb doesn't have
2085 exactly one predecessor, or if that predecessor (skipping
2086 over asan-created basic blocks) isn't the last processed
2087 basic block. Thus we effectively flush on extended basic
2088 block boundaries. */
2089 while (single_pred_p (prev_bb))
2091 prev_bb = single_pred (prev_bb);
2092 if (prev_bb->index < saved_last_basic_block)
2093 break;
2095 if (prev_bb != last_bb)
2096 empty_mem_ref_hash_table ();
2097 last_bb = bb;
2099 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2101 gimple *s = gsi_stmt (i);
2103 if (has_stmt_been_instrumented_p (s))
2104 gsi_next (&i);
2105 else if (gimple_assign_single_p (s)
2106 && !gimple_clobber_p (s)
2107 && maybe_instrument_assignment (&i))
2108 /* Nothing to do as maybe_instrument_assignment advanced
2109 the iterator I. */;
2110 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2111 /* Nothing to do as maybe_instrument_call
2112 advanced the iterator I. */;
2113 else
2115 /* No instrumentation happened.
2117 If the current instruction is a function call that
2118 might free something, let's forget about the memory
2119 references that got instrumented. Otherwise we might
2120 miss some instrumentation opportunities. */
2121 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2122 empty_mem_ref_hash_table ();
2124 gsi_next (&i);
2128 free_mem_ref_resources ();
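/* Example (illustrative sketch): given

     bb1:  *p = 1;    // check emitted, (p, 4) enters the hash table
     bb2:  t_1 = *p;  // single pred bb1 == last processed bb:
                      // hash hit, no second check
     bb3:  *p = 2;    // two predecessors: table flushed,
                      // check emitted again

   i.e. the hash table only survives along extended-basic-block
   chains.  */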
2131 /* Build
2132 __asan_before_dynamic_init (module_name)
2133 or
2134 __asan_after_dynamic_init ()
2135 call. */
2137 tree
2138 asan_dynamic_init_call (bool after_p)
2140 tree fn = builtin_decl_implicit (after_p
2141 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2142 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2143 tree module_name_cst = NULL_TREE;
2144 if (!after_p)
2146 pretty_printer module_name_pp;
2147 pp_string (&module_name_pp, main_input_filename);
2149 if (shadow_ptr_types[0] == NULL_TREE)
2150 asan_init_shadow_ptr_types ();
2151 module_name_cst = asan_pp_string (&module_name_pp);
2152 module_name_cst = fold_convert (const_ptr_type_node,
2153 module_name_cst);
2156 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
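/* Example (illustrative sketch): for a C++ translation unit foo.C
   with dynamically initialized globals, the static-initialization
   function is bracketed as

     __asan_before_dynamic_init ("foo.C");
     ... the TU's dynamic initializers ...
     __asan_after_dynamic_init ();

   which lets the run-time library flag initialization-order bugs,
   i.e. reads of not-yet-initialized globals from other translation
   units.  */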
2159 /* Build
2160 struct __asan_global
2162 const void *__beg;
2163 uptr __size;
2164 uptr __size_with_redzone;
2165 const void *__name;
2166 const void *__module_name;
2167 uptr __has_dynamic_init;
2168 __asan_global_source_location *__location;
2169 } type. */
2171 static tree
2172 asan_global_struct (void)
2174 static const char *field_names[7]
2175 = { "__beg", "__size", "__size_with_redzone",
2176 "__name", "__module_name", "__has_dynamic_init", "__location"};
2177 tree fields[7], ret;
2178 int i;
2180 ret = make_node (RECORD_TYPE);
2181 for (i = 0; i < 7; i++)
2183 fields[i]
2184 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2185 get_identifier (field_names[i]),
2186 (i == 0 || i == 3) ? const_ptr_type_node
2187 : pointer_sized_int_node);
2188 DECL_CONTEXT (fields[i]) = ret;
2189 if (i)
2190 DECL_CHAIN (fields[i - 1]) = fields[i];
2192 tree type_decl = build_decl (input_location, TYPE_DECL,
2193 get_identifier ("__asan_global"), ret);
2194 DECL_IGNORED_P (type_decl) = 1;
2195 DECL_ARTIFICIAL (type_decl) = 1;
2196 TYPE_FIELDS (ret) = fields[0];
2197 TYPE_NAME (ret) = type_decl;
2198 TYPE_STUB_DECL (ret) = type_decl;
2199 layout_type (ret);
2200 return ret;
2203 /* Append description of a single global DECL into vector V.
2204 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2206 static void
2207 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2209 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2210 unsigned HOST_WIDE_INT size;
2211 tree str_cst, module_name_cst, refdecl = decl;
2212 vec<constructor_elt, va_gc> *vinner = NULL;
2214 pretty_printer asan_pp, module_name_pp;
2216 if (DECL_NAME (decl))
2217 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2218 else
2219 pp_string (&asan_pp, "<unknown>");
2220 str_cst = asan_pp_string (&asan_pp);
2222 pp_string (&module_name_pp, main_input_filename);
2223 module_name_cst = asan_pp_string (&module_name_pp);
2225 if (asan_needs_local_alias (decl))
2227 char buf[20];
2228 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2229 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2230 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2231 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2232 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2233 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2234 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2235 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2236 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2237 TREE_STATIC (refdecl) = 1;
2238 TREE_PUBLIC (refdecl) = 0;
2239 TREE_USED (refdecl) = 1;
2240 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2243 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2244 fold_convert (const_ptr_type_node,
2245 build_fold_addr_expr (refdecl)));
2246 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2247 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2248 size += asan_red_zone_size (size);
2249 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2250 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2251 fold_convert (const_ptr_type_node, str_cst));
2252 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2253 fold_convert (const_ptr_type_node, module_name_cst));
2254 varpool_node *vnode = varpool_node::get (decl);
2255 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2256 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2257 build_int_cst (uptr, has_dynamic_init));
2258 tree locptr = NULL_TREE;
2259 location_t loc = DECL_SOURCE_LOCATION (decl);
2260 expanded_location xloc = expand_location (loc);
2261 if (xloc.file != NULL)
2263 static int lasanloccnt = 0;
2264 char buf[25];
2265 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2266 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2267 ubsan_get_source_location_type ());
2268 TREE_STATIC (var) = 1;
2269 TREE_PUBLIC (var) = 0;
2270 DECL_ARTIFICIAL (var) = 1;
2271 DECL_IGNORED_P (var) = 1;
2272 pretty_printer filename_pp;
2273 pp_string (&filename_pp, xloc.file);
2274 tree str = asan_pp_string (&filename_pp);
2275 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2276 NULL_TREE, str, NULL_TREE,
2277 build_int_cst (unsigned_type_node,
2278 xloc.line), NULL_TREE,
2279 build_int_cst (unsigned_type_node,
2280 xloc.column));
2281 TREE_CONSTANT (ctor) = 1;
2282 TREE_STATIC (ctor) = 1;
2283 DECL_INITIAL (var) = ctor;
2284 varpool_node::finalize_decl (var);
2285 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2287 else
2288 locptr = build_int_cst (uptr, 0);
2289 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2290 init = build_constructor (type, vinner);
2291 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
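/* Example (illustrative sketch; exact sizes depend on the target):
   for

     int g;   // defined in foo.c

   the record appended to V corresponds to

     { &g,                          // __beg
       4,                           // __size
       4 + asan_red_zone_size (4),  // __size_with_redzone
       "g", "foo.c",                // __name, __module_name
       0,                           // __has_dynamic_init
       &.LASANLOC1 }                // __location: {"foo.c", line, col}

   with &g replaced by a local .LASAN* alias when
   asan_needs_local_alias (decl) holds.  */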
2294 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2295 void
2296 initialize_sanitizer_builtins (void)
2298 tree decl;
2300 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2301 return;
2303 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2304 tree BT_FN_VOID_PTR
2305 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2306 tree BT_FN_VOID_CONST_PTR
2307 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2308 tree BT_FN_VOID_PTR_PTR
2309 = build_function_type_list (void_type_node, ptr_type_node,
2310 ptr_type_node, NULL_TREE);
2311 tree BT_FN_VOID_PTR_PTR_PTR
2312 = build_function_type_list (void_type_node, ptr_type_node,
2313 ptr_type_node, ptr_type_node, NULL_TREE);
2314 tree BT_FN_VOID_PTR_PTRMODE
2315 = build_function_type_list (void_type_node, ptr_type_node,
2316 pointer_sized_int_node, NULL_TREE);
2317 tree BT_FN_VOID_INT
2318 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2319 tree BT_FN_SIZE_CONST_PTR_INT
2320 = build_function_type_list (size_type_node, const_ptr_type_node,
2321 integer_type_node, NULL_TREE);
2322 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2323 tree BT_FN_IX_CONST_VPTR_INT[5];
2324 tree BT_FN_IX_VPTR_IX_INT[5];
2325 tree BT_FN_VOID_VPTR_IX_INT[5];
2326 tree vptr
2327 = build_pointer_type (build_qualified_type (void_type_node,
2328 TYPE_QUAL_VOLATILE));
2329 tree cvptr
2330 = build_pointer_type (build_qualified_type (void_type_node,
2331 TYPE_QUAL_VOLATILE
2332 |TYPE_QUAL_CONST));
2333 tree boolt
2334 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2335 int i;
2336 for (i = 0; i < 5; i++)
2338 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2339 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2340 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2341 integer_type_node, integer_type_node,
2342 NULL_TREE);
2343 BT_FN_IX_CONST_VPTR_INT[i]
2344 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2345 BT_FN_IX_VPTR_IX_INT[i]
2346 = build_function_type_list (ix, vptr, ix, integer_type_node,
2347 NULL_TREE);
2348 BT_FN_VOID_VPTR_IX_INT[i]
2349 = build_function_type_list (void_type_node, vptr, ix,
2350 integer_type_node, NULL_TREE);
2352 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2353 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2354 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2355 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2356 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2357 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2358 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2359 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2360 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2361 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2362 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2363 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2364 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2365 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2366 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2367 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2368 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2369 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2370 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2371 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2372 #undef ATTR_NOTHROW_LEAF_LIST
2373 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2374 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2375 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2376 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2377 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2378 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2379 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2380 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2381 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2382 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2383 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2384 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2385 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2386 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2387 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2388 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2389 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2390 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2391 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2392 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2393 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2394 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2395 #undef DEF_BUILTIN_STUB
2396 #define DEF_BUILTIN_STUB(ENUM, NAME)
2397 #undef DEF_SANITIZER_BUILTIN
2398 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2399 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2400 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2401 set_call_expr_flags (decl, ATTRS); \
2402 set_builtin_decl (ENUM, decl, true);
2404 #include "sanitizer.def"
2406 /* -fsanitize=object-size uses __builtin_object_size, but that might
2407 not be available for e.g. Fortran at this point. We use
2408 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2409 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2410 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2411 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2412 BT_FN_SIZE_CONST_PTR_INT,
2413 ATTR_PURE_NOTHROW_LEAF_LIST)
2415 #undef DEF_SANITIZER_BUILTIN
2416 #undef DEF_BUILTIN_STUB
2419 /* Called via hash_table::traverse. Count the number of emitted
2420 STRING_CSTs in the constant hash table. */
2422 static int
2423 count_string_csts (constant_descriptor_tree **slot,
2424 unsigned HOST_WIDE_INT *data)
2426 struct constant_descriptor_tree *desc = *slot;
2427 if (TREE_CODE (desc->value) == STRING_CST
2428 && TREE_ASM_WRITTEN (desc->value)
2429 && asan_protect_global (desc->value))
2430 ++*data;
2431 return 1;
2434 /* Helper structure to pass two parameters to
2435 add_string_csts. */
2437 struct asan_add_string_csts_data
2439 tree type;
2440 vec<constructor_elt, va_gc> *v;
2443 /* Called via hash_table::traverse. Call asan_add_global
2444 on emitted STRING_CSTs from the constant hash table. */
2446 static int
2447 add_string_csts (constant_descriptor_tree **slot,
2448 asan_add_string_csts_data *aascd)
2450 struct constant_descriptor_tree *desc = *slot;
2451 if (TREE_CODE (desc->value) == STRING_CST
2452 && TREE_ASM_WRITTEN (desc->value)
2453 && asan_protect_global (desc->value))
2455 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2456 aascd->type, aascd->v);
2458 return 1;
2461 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2462 invoke ggc_collect. */
2463 static GTY(()) tree asan_ctor_statements;
2465 /* Module-level instrumentation.
2466 - Insert __asan_init_vN() into the list of CTORs.
2467 - TODO: insert redzones around globals.
2470 void
2471 asan_finish_file (void)
2473 varpool_node *vnode;
2474 unsigned HOST_WIDE_INT gcount = 0;
2476 if (shadow_ptr_types[0] == NULL_TREE)
2477 asan_init_shadow_ptr_types ();
2478 /* Avoid instrumenting code in the asan ctors/dtors.
2479 We don't need to insert padding after the description strings,
2480 nor after the .LASAN* array. */
2481 flag_sanitize &= ~SANITIZE_ADDRESS;
2483 /* For user-space we want asan constructors to run first.
2484 The Linux kernel does not support priorities other than the default,
2485 and the only other user of constructors is coverage, so we run with the default
2486 priority. */
2487 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2488 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2490 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2492 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2493 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2494 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2495 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2497 FOR_EACH_DEFINED_VARIABLE (vnode)
2498 if (TREE_ASM_WRITTEN (vnode->decl)
2499 && asan_protect_global (vnode->decl))
2500 ++gcount;
2501 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2502 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2503 (&gcount);
2504 if (gcount)
2506 tree type = asan_global_struct (), var, ctor;
2507 tree dtor_statements = NULL_TREE;
2508 vec<constructor_elt, va_gc> *v;
2509 char buf[20];
2511 type = build_array_type_nelts (type, gcount);
2512 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2513 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2514 type);
2515 TREE_STATIC (var) = 1;
2516 TREE_PUBLIC (var) = 0;
2517 DECL_ARTIFICIAL (var) = 1;
2518 DECL_IGNORED_P (var) = 1;
2519 vec_alloc (v, gcount);
2520 FOR_EACH_DEFINED_VARIABLE (vnode)
2521 if (TREE_ASM_WRITTEN (vnode->decl)
2522 && asan_protect_global (vnode->decl))
2523 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2524 struct asan_add_string_csts_data aascd;
2525 aascd.type = TREE_TYPE (type);
2526 aascd.v = v;
2527 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2528 (&aascd);
2529 ctor = build_constructor (type, v);
2530 TREE_CONSTANT (ctor) = 1;
2531 TREE_STATIC (ctor) = 1;
2532 DECL_INITIAL (var) = ctor;
2533 varpool_node::finalize_decl (var);
2535 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2536 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2537 append_to_statement_list (build_call_expr (fn, 2,
2538 build_fold_addr_expr (var),
2539 gcount_tree),
2540 &asan_ctor_statements);
2542 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2543 append_to_statement_list (build_call_expr (fn, 2,
2544 build_fold_addr_expr (var),
2545 gcount_tree),
2546 &dtor_statements);
2547 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2549 if (asan_ctor_statements)
2550 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2551 flag_sanitize |= SANITIZE_ADDRESS;
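/* Example (illustrative sketch; the symbol names are made up): for a
   user-space TU with two protected globals, the generated static
   cdtors behave like

     ctor:  __asan_init ();
            __asan_version_mismatch_check_vN ();
            __asan_register_globals (&.LASAN0, 2);
     dtor:  __asan_unregister_globals (&.LASAN0, 2);

   where .LASAN0 is the array of __asan_global records built above,
   and the ctor runs at MAX_RESERVED_INIT_PRIORITY - 1 so that it
   precedes user constructors.  */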
2554 /* Expand the ASAN_{LOAD,STORE} builtins. */
2556 bool
2557 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2559 gimple *g = gsi_stmt (*iter);
2560 location_t loc = gimple_location (g);
2561 bool recover_p;
2562 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2563 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
2564 else
2565 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2567 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2568 gcc_assert (flags < ASAN_CHECK_LAST);
2569 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2570 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2571 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2573 tree base = gimple_call_arg (g, 1);
2574 tree len = gimple_call_arg (g, 2);
2575 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2577 HOST_WIDE_INT size_in_bytes
2578 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2580 if (use_calls)
2582 /* Instrument using callbacks. */
2583 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2584 NOP_EXPR, base);
2585 gimple_set_location (g, loc);
2586 gsi_insert_before (iter, g, GSI_SAME_STMT);
2587 tree base_addr = gimple_assign_lhs (g);
2589 int nargs;
2590 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2591 if (nargs == 1)
2592 g = gimple_build_call (fun, 1, base_addr);
2593 else
2595 gcc_assert (nargs == 2);
2596 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2597 NOP_EXPR, len);
2598 gimple_set_location (g, loc);
2599 gsi_insert_before (iter, g, GSI_SAME_STMT);
2600 tree sz_arg = gimple_assign_lhs (g);
2601 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2603 gimple_set_location (g, loc);
2604 gsi_replace (iter, g, false);
2605 return false;
2608 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2610 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2611 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2613 gimple_stmt_iterator gsi = *iter;
2615 if (!is_non_zero_len)
2617 /* So, the length of the memory area to asan-protect is
2618 non-constant. Let's guard the generated instrumentation code
2619 like:
2621 if (len != 0)
2623 // asan instrumentation code goes here.
2625 // fallthrough instructions, starting with *ITER. */
2627 g = gimple_build_cond (NE_EXPR,
2628 len,
2629 build_int_cst (TREE_TYPE (len), 0),
2630 NULL_TREE, NULL_TREE);
2631 gimple_set_location (g, loc);
2633 basic_block then_bb, fallthrough_bb;
2634 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2635 /*then_more_likely_p=*/true,
2636 &then_bb, &fallthrough_bb);
2637 /* Note that fallthrough_bb starts with the statement that was
2638 pointed to by ITER. */
2640 /* The 'then block' of the 'if (len != 0)' condition is where
2641 we'll now generate the asan instrumentation code. */
2642 gsi = gsi_last_bb (then_bb);
2645 /* Get an iterator on the point where we can add the condition
2646 statement for the instrumentation. */
2647 basic_block then_bb, else_bb;
2648 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2649 /*then_more_likely_p=*/false,
2650 /*create_then_fallthru_edge*/recover_p,
2651 &then_bb,
2652 &else_bb);
2654 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2655 NOP_EXPR, base);
2656 gimple_set_location (g, loc);
2657 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2658 tree base_addr = gimple_assign_lhs (g);
2660 tree t = NULL_TREE;
2661 if (real_size_in_bytes >= 8)
2663 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2664 shadow_ptr_type);
2665 t = shadow;
2667 else
2669 /* Slow path for 1, 2 and 4 byte accesses. */
2670 /* Test (shadow != 0)
2671 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2672 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2673 shadow_ptr_type);
2674 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2675 gimple_seq seq = NULL;
2676 gimple_seq_add_stmt (&seq, shadow_test);
2677 /* Aligned (>= 8 bytes) accesses can test just
2678 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2679 to be 0. */
2680 if (align < 8)
2682 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2683 base_addr, 7));
2684 gimple_seq_add_stmt (&seq,
2685 build_type_cast (shadow_type,
2686 gimple_seq_last (seq)));
2687 if (real_size_in_bytes > 1)
2688 gimple_seq_add_stmt (&seq,
2689 build_assign (PLUS_EXPR,
2690 gimple_seq_last (seq),
2691 real_size_in_bytes - 1));
2692 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2694 else
2695 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2696 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2697 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2698 gimple_seq_last (seq)));
2699 t = gimple_assign_lhs (gimple_seq_last (seq));
2700 gimple_seq_set_location (seq, loc);
2701 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2703 /* For non-constant, misaligned or otherwise weird access sizes,
2704 check the first and the last byte. */
2705 if (size_in_bytes == -1)
2707 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2708 MINUS_EXPR, len,
2709 build_int_cst (pointer_sized_int_node, 1));
2710 gimple_set_location (g, loc);
2711 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2712 tree last = gimple_assign_lhs (g);
2713 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2714 PLUS_EXPR, base_addr, last);
2715 gimple_set_location (g, loc);
2716 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2717 tree base_end_addr = gimple_assign_lhs (g);
2719 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2720 shadow_ptr_type);
2721 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2722 gimple_seq seq = NULL;
2723 gimple_seq_add_stmt (&seq, shadow_test);
2724 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2725 base_end_addr, 7));
2726 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2727 gimple_seq_last (seq)));
2728 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2729 gimple_seq_last (seq),
2730 shadow));
2731 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2732 gimple_seq_last (seq)));
2733 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2734 gimple_seq_last (seq)));
2735 t = gimple_assign_lhs (gimple_seq_last (seq));
2736 gimple_seq_set_location (seq, loc);
2737 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2741 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2742 NULL_TREE, NULL_TREE);
2743 gimple_set_location (g, loc);
2744 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2746 /* Generate a call to the run-time library (e.g. __asan_report_load8). */
2747 gsi = gsi_start_bb (then_bb);
2748 int nargs;
2749 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2750 g = gimple_build_call (fun, nargs, base_addr, len);
2751 gimple_set_location (g, loc);
2752 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2754 gsi_remove (iter, true);
2755 *iter = gsi_start_bb (else_bb);
2757 return true;
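/* Example (illustrative sketch): expanding ASAN_CHECK for a 4-byte
   load from ADDR inline (use_calls == false) conceptually yields

     base = (uintptr_t) addr;
     shadow = *(char *) ((base >> ASAN_SHADOW_SHIFT)
                         + asan_shadow_offset ());
     if (shadow != 0
         && (char) ((base & 7) + (4 - 1)) >= shadow)
       __asan_report_load4 (base);   // noreturn unless recover_p

   E.g. if addr & 7 == 6 and the shadow byte is 5 (only the first
   five bytes of the granule addressable), 6 + 3 = 9 >= 5 and the
   access is reported.  With use_calls, the whole check is instead a
   single call such as __asan_load4 (base).  */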
2760 /* Instrument the current function. */
2762 static unsigned int
2763 asan_instrument (void)
2765 if (shadow_ptr_types[0] == NULL_TREE)
2766 asan_init_shadow_ptr_types ();
2767 transform_statements ();
2768 return 0;
2771 static bool
2772 gate_asan (void)
2774 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2775 && !lookup_attribute ("no_sanitize_address",
2776 DECL_ATTRIBUTES (current_function_decl));
2779 namespace {
2781 const pass_data pass_data_asan =
2783 GIMPLE_PASS, /* type */
2784 "asan", /* name */
2785 OPTGROUP_NONE, /* optinfo_flags */
2786 TV_NONE, /* tv_id */
2787 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2788 0, /* properties_provided */
2789 0, /* properties_destroyed */
2790 0, /* todo_flags_start */
2791 TODO_update_ssa, /* todo_flags_finish */
2794 class pass_asan : public gimple_opt_pass
2796 public:
2797 pass_asan (gcc::context *ctxt)
2798 : gimple_opt_pass (pass_data_asan, ctxt)
2801 /* opt_pass methods: */
2802 opt_pass * clone () { return new pass_asan (m_ctxt); }
2803 virtual bool gate (function *) { return gate_asan (); }
2804 virtual unsigned int execute (function *) { return asan_instrument (); }
2806 }; // class pass_asan
2808 } // anon namespace
2810 gimple_opt_pass *
2811 make_pass_asan (gcc::context *ctxt)
2813 return new pass_asan (ctxt);
2816 namespace {
2818 const pass_data pass_data_asan_O0 =
2820 GIMPLE_PASS, /* type */
2821 "asan0", /* name */
2822 OPTGROUP_NONE, /* optinfo_flags */
2823 TV_NONE, /* tv_id */
2824 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2825 0, /* properties_provided */
2826 0, /* properties_destroyed */
2827 0, /* todo_flags_start */
2828 TODO_update_ssa, /* todo_flags_finish */
2831 class pass_asan_O0 : public gimple_opt_pass
2833 public:
2834 pass_asan_O0 (gcc::context *ctxt)
2835 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2838 /* opt_pass methods: */
2839 virtual bool gate (function *) { return !optimize && gate_asan (); }
2840 virtual unsigned int execute (function *) { return asan_instrument (); }
2842 }; // class pass_asan_O0
2844 } // anon namespace
2846 gimple_opt_pass *
2847 make_pass_asan_O0 (gcc::context *ctxt)
2849 return new pass_asan_O0 (ctxt);
2852 #include "gt-asan.h"