/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that red zones are inserted
   around the allocated memory) and free (so that reuse of freed memory is
   delayed), and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free bugs in the heap, on the stack and for global variables.  */
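/* As a concrete sketch (not code GCC emits literally), the check above
   for a 4-byte load from address X, assuming the default x86_64 shadow
   offset of 0x7fff8000, is equivalent to this C:

     signed char ShadowValue = *(signed char *) ((X >> 3) + 0x7fff8000);
     if (ShadowValue != 0 && (signed char) ((X & 7) + 4 - 1) >= ShadowValue)
       __asan_report_load4 (X);

   A shadow byte of 0 means all 8 application bytes it covers are
   addressable; a small positive value K means only the first K of them
   are.  */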
/* [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32 bytes aligned; this one is called Partial
	   Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	   'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

	where '(...){n}' means the content inside the parenthesis occurs 'n'
	times, with 'n' being the number of variables on the stack.

     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.  */
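/* Pulling the example together (an illustration derived from the layout
   above, not something the compiler prints): the 20 shadow bytes that
   cover foo's 160-byte protected frame read, from the LEFT red zone
   upward:

     F1 F1 F1 F1   <- slot 7, LEFT red zone
     00 F4 F4 F4   <- slots 6 and 5, variable 'b' plus partial red zone
     F2 F2 F2 F2   <- slot 4, MIDDLE red zone
     00 00 00 F4   <- slots 3 and 2, variable 'a' plus partial red zone
     F3 F3 F3 F3   <- slot 1, RIGHT red zone

   Each shadow byte covers 8 bytes of the frame (ASAN_SHADOW_SHIFT == 3),
   so 5 * 4 shadow bytes map to the 5 * 32 bytes of the frame.  */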
/* [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The size of the red zones is chosen so that each
   variable starts on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
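/* A sketch of what the installed constructor and destructor amount to;
   the names below are illustrative, not the exact symbols GCC generates:

     static struct __asan_global __asan_global_array[N]; // one per global

     static void ctor (void)	// runs at load time
     {
       __asan_init_vN ();
       __asan_register_globals (__asan_global_array, N);
     }

     static void dtor (void)	// runs at unload time
     {
       __asan_unregister_globals (__asan_global_array, N);
     }
*/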
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static const char *sanitized_sections;
/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *secs)
{
  sanitized_sections = secs;
}

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  if (!sanitized_sections)
    return false;
  size_t len = strlen (sec);
  const char *p = sanitized_sections;
  while ((p = strstr (p, sec)))
    {
      if ((p == sanitized_sections || p[-1] == ',')
	  && (p[len] == 0 || p[len] == ','))
	return true;
      ++p;
    }
  return false;
}
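/* For example (illustrative), with the user's section list -- as passed
   to set_sanitized_sections -- being "mysec,vtbl",
   section_sanitized_p ("mysec") and section_sanitized_p ("vtbl") return
   true, while section_sanitized_p ("mysec2") returns false: a match
   must start at the beginning of the list or right after a comma, and
   end at a comma or at the end of the string.  */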
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref *value_type;
  typedef asan_mem_ref *compare_type;

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
	 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	if (TREE_CODE (dest) == ADDR_EXPR)
	  dest = TREE_OPERAND (dest, 0);
	else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	  dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
			 dest, build_int_cst (TREE_TYPE (dest), 0));
	else
	  gcc_unreachable ();

	access_size = int_size_in_bytes (TREE_TYPE (dest));
      }
    default:
      /* The other builtin memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
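/* For instance, for memcpy (d, s, n) the function above records SRC0 =
   {s, 1} with *SRC0_LEN = n and *SRC0_IS_STORE = false, plus DST =
   {d, 1} with *DST_LEN = n and *DST_IS_STORE = true.  For
   __atomic_store_8 (p, v, order) it instead records in DST a direct
   dereference of P of the pointed-to size, with *DEST_IS_DEREF set to
   true and *DST_LEN = NULL_TREE.  */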
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
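/* For example, the shadow byte sequence {0x00, 0xF4, 0xF4, 0xF4} becomes
   the SImode constant 0xF4F4F400 on a little-endian target and
   0x00F4F4F4 on a big-endian one, matching the layout pictured in the
   stack-protection comment at the top of this file.  */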
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a
   library call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the OFFSETS array offsets are computed.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or the corresponding address based on the
   __asan_stack_malloc* return value.  */
rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
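/* In outline, the code emitted by asan_emit_stack_protection behaves
   like this sketch (simplified; UAR stands for the use-after-return
   mode guarded by __asan_option_detect_stack_use_after_return):

     prologue:
       base = &frame;
       if (UAR viable && __asan_option_detect_stack_use_after_return)
	 base = __asan_stack_malloc_N (frame_size, &frame);
       base[0] = ASAN_STACK_FRAME_MAGIC;
       base[1] = &"<description string>";
       base[2] = &LASANPC;		// PC of this function
       poison the red-zone shadow bytes of base's frame;

     epilogue:
       if (base != &frame)		// frame was malloc-ed
	 mark the frame's shadow as ASAN_STACK_MAGIC_USE_AFTER_RET,
	 or call __asan_stack_free_N (base, frame_size, &frame);
       else
	 clear the frame's shadow bytes;
*/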
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
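/* E.g. for an aborting (RECOVER_P == false) 4-byte store,
   report[0][1][exact_log2 (4)] selects BUILT_IN_ASAN_REPORT_STORE4 and
   *NARGS is 1; a variable-sized access (SIZE_IN_BYTES == -1) selects
   the _N variant, which also takes the length, so *NARGS is 2.  */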
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
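/* The four statements built above correspond to this C sketch (the
   temporaries are SSA names in the actual gimple):

     uintptr_t t0 = base_addr >> ASAN_SHADOW_SHIFT;
     uintptr_t t1 = t0 + asan_shadow_offset ();
     shadow_type *t2 = (shadow_type *) t1;
     shadow_type shadow = *t2;		// returned SSA name
*/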
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
1717 static void
1718 build_check_stmt (location_t loc, tree base, tree len,
1719 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1720 bool is_non_zero_len, bool before_p, bool is_store,
1721 bool is_scalar_access, unsigned int align = 0)
1723 gimple_stmt_iterator gsi = *iter;
1724 gimple g;
1726 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1728 gsi = *iter;
1730 base = unshare_expr (base);
1731 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1733 if (len)
1735 len = unshare_expr (len);
1736 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1738 else
1740 gcc_assert (size_in_bytes != -1);
1741 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1744 if (size_in_bytes > 1)
1746 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1747 || size_in_bytes > 16)
1748 is_scalar_access = false;
1749 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1751 /* On non-strict alignment targets, if
1752 16-byte access is just 8-byte aligned,
1753 this will result in misaligned shadow
1754 memory 2 byte load, but otherwise can
1755 be handled using one read. */
1756 if (size_in_bytes != 16
1757 || STRICT_ALIGNMENT
1758 || align < 8 * BITS_PER_UNIT)
1759 is_scalar_access = false;
1763 HOST_WIDE_INT flags = 0;
1764 if (is_store)
1765 flags |= ASAN_CHECK_STORE;
1766 if (is_non_zero_len)
1767 flags |= ASAN_CHECK_NON_ZERO_LEN;
1768 if (is_scalar_access)
1769 flags |= ASAN_CHECK_SCALAR_ACCESS;
1771 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1772 build_int_cst (integer_type_node, flags),
1773 base, len,
1774 build_int_cst (integer_type_node,
1775 align / BITS_PER_UNIT));
1776 gimple_set_location (g, loc);
1777 if (before_p)
1778 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1779 else
1781 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1782 gsi_next (&gsi);
1783 *iter = gsi;
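/* [Editor's sketch, not part of the original source: the scalar-access
   decision made above, folded into a stand-alone predicate for clarity.
   SIZE is the access size in bytes, ALIGN its alignment in bits;
   BITS_PER_UNIT is taken as 8 and STRICT_ALIGNMENT is passed as a
   parameter instead of being a target macro.]  */
static int
sketch_scalar_access_p (long size, unsigned int align, int strict_alignment)
{
  if (size <= 1)
    return 1;
  /* Only power-of-two sizes up to 16 bytes map to one shadow test.  */
  if ((size & (size - 1)) != 0 || size > 16)
    return 0;
  if (align && align < (unsigned int) size * 8)
    /* A 16-byte access that is merely 8-byte aligned still works on
       non-strict-alignment targets via one misaligned 2-byte shadow
       load; everything else falls back to a region check.  */
    return size == 16 && !strict_alignment && align >= 8 * 8;
  return 1;
}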
1787 /* If T represents a memory access, add instrumentation code before ITER.
1788 LOCATION is source code location.
1789 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1791 static void
1792 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1793 location_t location, bool is_store)
1795 if (is_store && !ASAN_INSTRUMENT_WRITES)
1796 return;
1797 if (!is_store && !ASAN_INSTRUMENT_READS)
1798 return;
1800 tree type, base;
1801 HOST_WIDE_INT size_in_bytes;
1803 type = TREE_TYPE (t);
1804 switch (TREE_CODE (t))
1806 case ARRAY_REF:
1807 case COMPONENT_REF:
1808 case INDIRECT_REF:
1809 case MEM_REF:
1810 case VAR_DECL:
1811 case BIT_FIELD_REF:
1812 break;
1814 default:
1815 return;
1818 size_in_bytes = int_size_in_bytes (type);
1819 if (size_in_bytes <= 0)
1820 return;
1822 HOST_WIDE_INT bitsize, bitpos;
1823 tree offset;
1824 machine_mode mode;
1825 int volatilep = 0, unsignedp = 0;
1826 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1827 &mode, &unsignedp, &volatilep, false);
1829 if (TREE_CODE (t) == COMPONENT_REF
1830 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1832 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1833 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1834 TREE_OPERAND (t, 0), repr,
1835 NULL_TREE), location, is_store);
1836 return;
1839 if (bitpos % BITS_PER_UNIT
1840 || bitsize != size_in_bytes * BITS_PER_UNIT)
1841 return;
1843 if (TREE_CODE (inner) == VAR_DECL
1844 && offset == NULL_TREE
1845 && bitpos >= 0
1846 && DECL_SIZE (inner)
1847 && tree_fits_shwi_p (DECL_SIZE (inner))
1848 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1850 if (DECL_THREAD_LOCAL_P (inner))
1851 return;
1852 if (!ASAN_GLOBALS && is_global_var (inner))
1853 return;
1854 if (!TREE_STATIC (inner))
1856 /* Automatic vars in the current function will always be
1857 accessible. */
1858 if (decl_function_context (inner) == current_function_decl)
1859 return;
1861 /* Always instrument external vars; they might be dynamically
1862 initialized. */
1863 else if (!DECL_EXTERNAL (inner))
1865 /* Static vars that are known not to be dynamically
1866 initialized will always be accessible. */
1867 varpool_node *vnode = varpool_node::get (inner);
1868 if (vnode && !vnode->dynamically_initialized)
1869 return;
1873 base = build_fold_addr_expr (t);
1874 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1876 unsigned int align = get_object_alignment (t);
1877 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1878 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1879 is_store, /*is_scalar_access*/true, align);
1880 update_mem_ref_hash_table (base, size_in_bytes);
1881 update_mem_ref_hash_table (t, size_in_bytes);
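/* [Editor's example, hypothetical translation unit: which accesses the
   VAR_DECL shortcut above leaves unchecked.  Constant, provably
   in-bounds indices are assumed, so get_inner_reference resolves to
   the VAR_DECL itself.

     static int a[10];   // static, no dynamic initializer
     extern int b[10];   // external, may be dynamically initialized
     void f (void)
     {
       int local[4];
       local[1] = 0;     // automatic var of this function: skipped
       a[1] = 0;         // known-size static, not dynamic-init: skipped
       b[1] = 0;         // external: instrumented
     }
   ]  */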
1886 /* Insert a memory reference into the hash table if the access
1887 length can be determined at compile time. */
1889 static void
1890 maybe_update_mem_ref_hash_table (tree base, tree len)
1892 if (!POINTER_TYPE_P (TREE_TYPE (base))
1893 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1894 return;
1896 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1898 if (size_in_bytes != -1)
1899 update_mem_ref_hash_table (base, size_in_bytes);
1902 /* Instrument an access to a contiguous memory region that starts at
1903 the address pointed to by BASE, over a length of LEN (expressed in
1904 units of sizeof (*BASE) bytes). ITER points to the instruction before
1905 which the instrumentation instructions must be inserted. LOCATION
1906 is the source location that the instrumentation instructions must
1907 have. If IS_STORE is true, then the memory access is a store;
1908 otherwise, it's a load. */
1910 static void
1911 instrument_mem_region_access (tree base, tree len,
1912 gimple_stmt_iterator *iter,
1913 location_t location, bool is_store)
1915 if (!POINTER_TYPE_P (TREE_TYPE (base))
1916 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1917 || integer_zerop (len))
1918 return;
1920 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1922 if ((size_in_bytes == -1)
1923 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1925 build_check_stmt (location, base, len, size_in_bytes, iter,
1926 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1927 is_store, /*is_scalar_access*/false, /*align*/0);
1930 maybe_update_mem_ref_hash_table (base, len);
1931 *iter = gsi_for_stmt (gsi_stmt (*iter));
1934 /* Instrument the call to a built-in memory access function that is
1935 pointed to by the iterator ITER.
1937 Upon completion, return TRUE iff *ITER has been advanced to the
1938 statement following the one it was originally pointing to. */
1940 static bool
1941 instrument_builtin_call (gimple_stmt_iterator *iter)
1943 if (!ASAN_MEMINTRIN)
1944 return false;
1946 bool iter_advanced_p = false;
1947 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1949 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1951 location_t loc = gimple_location (call);
1953 asan_mem_ref src0, src1, dest;
1954 asan_mem_ref_init (&src0, NULL, 1);
1955 asan_mem_ref_init (&src1, NULL, 1);
1956 asan_mem_ref_init (&dest, NULL, 1);
1958 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1959 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1960 dest_is_deref = false, intercepted_p = true;
1962 if (get_mem_refs_of_builtin_call (call,
1963 &src0, &src0_len, &src0_is_store,
1964 &src1, &src1_len, &src1_is_store,
1965 &dest, &dest_len, &dest_is_store,
1966 &dest_is_deref, &intercepted_p))
1968 if (dest_is_deref)
1970 instrument_derefs (iter, dest.start, loc, dest_is_store);
1971 gsi_next (iter);
1972 iter_advanced_p = true;
1974 else if (!intercepted_p
1975 && (src0_len || src1_len || dest_len))
1977 if (src0.start != NULL_TREE)
1978 instrument_mem_region_access (src0.start, src0_len,
1979 iter, loc, /*is_store=*/false);
1980 if (src1.start != NULL_TREE)
1981 instrument_mem_region_access (src1.start, src1_len,
1982 iter, loc, /*is_store=*/false);
1983 if (dest.start != NULL_TREE)
1984 instrument_mem_region_access (dest.start, dest_len,
1985 iter, loc, /*is_store=*/true);
1987 *iter = gsi_for_stmt (call);
1988 gsi_next (iter);
1989 iter_advanced_p = true;
1991 else
1993 if (src0.start != NULL_TREE)
1994 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1995 if (src1.start != NULL_TREE)
1996 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1997 if (dest.start != NULL_TREE)
1998 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2001 return iter_advanced_p;
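/* [Editor's illustration: for a call such as

     __builtin_memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports (s, n) as a load region and
   (d, n) as a store region.  If the runtime does not intercept the
   function, both regions are checked inline before the call;
   otherwise (intercepted_p) only the mem-ref hash table is updated,
   since the libasan interceptor performs the check itself.]  */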
2004 /* Instrument the assignment statement pointed to by ITER if it is
2005 subject to instrumentation. Return TRUE iff instrumentation
2006 actually happened. In that case, the iterator ITER is advanced to
2007 the next logical expression following the one initially pointed to
2008 by ITER, and the memory reference whose access has been
2009 instrumented is added to the memory references hash table. */
2011 static bool
2012 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2014 gimple s = gsi_stmt (*iter);
2016 gcc_assert (gimple_assign_single_p (s));
2018 tree ref_expr = NULL_TREE;
2019 bool is_store, is_instrumented = false;
2021 if (gimple_store_p (s))
2023 ref_expr = gimple_assign_lhs (s);
2024 is_store = true;
2025 instrument_derefs (iter, ref_expr,
2026 gimple_location (s),
2027 is_store);
2028 is_instrumented = true;
2031 if (gimple_assign_load_p (s))
2033 ref_expr = gimple_assign_rhs1 (s);
2034 is_store = false;
2035 instrument_derefs (iter, ref_expr,
2036 gimple_location (s),
2037 is_store);
2038 is_instrumented = true;
2041 if (is_instrumented)
2042 gsi_next (iter);
2044 return is_instrumented;
2047 /* Instrument the function call pointed to by the iterator ITER, if it
2048 is subject to instrumentation. At the moment, the only function
2049 calls that are instrumented are some built-in functions that access
2050 memory. Look at instrument_builtin_call to learn more.
2052 Upon completion return TRUE iff *ITER was advanced to the statement
2053 following the one it was originally pointing to. */
2055 static bool
2056 maybe_instrument_call (gimple_stmt_iterator *iter)
2058 gimple stmt = gsi_stmt (*iter);
2059 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2061 if (is_builtin && instrument_builtin_call (iter))
2062 return true;
2064 if (gimple_call_noreturn_p (stmt))
2066 if (is_builtin)
2068 tree callee = gimple_call_fndecl (stmt);
2069 switch (DECL_FUNCTION_CODE (callee))
2071 case BUILT_IN_UNREACHABLE:
2072 case BUILT_IN_TRAP:
2073 /* Don't instrument these. */
2074 return false;
2075 default:
2076 break;
2079 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2080 gimple g = gimple_build_call (decl, 0);
2081 gimple_set_location (g, gimple_location (stmt));
2082 gsi_insert_before (iter, g, GSI_SAME_STMT);
2084 return false;
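/* [Editor's illustration: given a noreturn call such as

     abort ();

   a call to __asan_handle_no_return () is inserted immediately before
   it, so the runtime can unpoison the stack frames that are about to
   be abandoned; __builtin_unreachable and __builtin_trap are exempted
   above.]  */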
2087 /* Walk each instruction of all basic blocks and instrument those that
2088 represent memory references: loads, stores, or function calls.
2089 In a given basic block, this function avoids instrumenting memory
2090 references that have already been instrumented. */
2092 static void
2093 transform_statements (void)
2095 basic_block bb, last_bb = NULL;
2096 gimple_stmt_iterator i;
2097 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2099 FOR_EACH_BB_FN (bb, cfun)
2101 basic_block prev_bb = bb;
2103 if (bb->index >= saved_last_basic_block) continue;
2105 /* Flush the mem ref hash table if the current bb doesn't have
2106 exactly one predecessor, or if that predecessor (skipping
2107 over asan-created basic blocks) isn't the last processed
2108 basic block. Thus we effectively flush on extended basic
2109 block boundaries. */
2110 while (single_pred_p (prev_bb))
2112 prev_bb = single_pred (prev_bb);
2113 if (prev_bb->index < saved_last_basic_block)
2114 break;
2116 if (prev_bb != last_bb)
2117 empty_mem_ref_hash_table ();
2118 last_bb = bb;
2120 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2122 gimple s = gsi_stmt (i);
2124 if (has_stmt_been_instrumented_p (s))
2125 gsi_next (&i);
2126 else if (gimple_assign_single_p (s)
2127 && !gimple_clobber_p (s)
2128 && maybe_instrument_assignment (&i))
2129 /* Nothing to do as maybe_instrument_assignment advanced
2130 the iterator I. */;
2131 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2132 /* Nothing to do as maybe_instrument_call
2133 advanced the iterator I. */;
2134 else
2136 /* No instrumentation happened.
2138 If the current instruction is a function call that
2139 might free something, let's forget about the memory
2140 references that got instrumented. Otherwise we might
2141 miss some instrumentation opportunities. */
2142 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2143 empty_mem_ref_hash_table ();
2145 gsi_next (&i);
2149 free_mem_ref_resources ();
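/* [Editor's example of the redundancy elimination above, within one
   extended basic block:

     *p = 1;    // instrumented; (p, 4) enters the hash table
     x  = *p;   // same ref, same size: has_stmt_been_instrumented_p
     free (q);  // not a nonfreeing call: table is flushed, since
     *p = 2;    // p might now point into freed storage: re-checked
   ]  */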
2152 /* Build
2153 __asan_before_dynamic_init (module_name)
2154 or
2155 __asan_after_dynamic_init ()
2156 call. */
2158 tree
2159 asan_dynamic_init_call (bool after_p)
2161 tree fn = builtin_decl_implicit (after_p
2162 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2163 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2164 tree module_name_cst = NULL_TREE;
2165 if (!after_p)
2167 pretty_printer module_name_pp;
2168 pp_string (&module_name_pp, main_input_filename);
2170 if (shadow_ptr_types[0] == NULL_TREE)
2171 asan_init_shadow_ptr_types ();
2172 module_name_cst = asan_pp_string (&module_name_pp);
2173 module_name_cst = fold_convert (const_ptr_type_node,
2174 module_name_cst);
2177 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2180 /* Build
2181 struct __asan_global
2182 {
2183 const void *__beg;
2184 uptr __size;
2185 uptr __size_with_redzone;
2186 const void *__name;
2187 const void *__module_name;
2188 uptr __has_dynamic_init;
2189 __asan_global_source_location *__location;
2190 } type. */
2192 static tree
2193 asan_global_struct (void)
2195 static const char *field_names[7]
2196 = { "__beg", "__size", "__size_with_redzone",
2197 "__name", "__module_name", "__has_dynamic_init", "__location"};
2198 tree fields[7], ret;
2199 int i;
2201 ret = make_node (RECORD_TYPE);
2202 for (i = 0; i < 7; i++)
2204 fields[i]
2205 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2206 get_identifier (field_names[i]),
2207 (i == 0 || i == 3) ? const_ptr_type_node
2208 : pointer_sized_int_node);
2209 DECL_CONTEXT (fields[i]) = ret;
2210 if (i)
2211 DECL_CHAIN (fields[i - 1]) = fields[i];
2213 tree type_decl = build_decl (input_location, TYPE_DECL,
2214 get_identifier ("__asan_global"), ret);
2215 DECL_IGNORED_P (type_decl) = 1;
2216 DECL_ARTIFICIAL (type_decl) = 1;
2217 TYPE_FIELDS (ret) = fields[0];
2218 TYPE_NAME (ret) = type_decl;
2219 TYPE_STUB_DECL (ret) = type_decl;
2220 layout_type (ret);
2221 return ret;
2224 /* Append the description of a single global DECL into vector V.
2225 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2227 static void
2228 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2230 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2231 unsigned HOST_WIDE_INT size;
2232 tree str_cst, module_name_cst, refdecl = decl;
2233 vec<constructor_elt, va_gc> *vinner = NULL;
2235 pretty_printer asan_pp, module_name_pp;
2237 if (DECL_NAME (decl))
2238 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2239 else
2240 pp_string (&asan_pp, "<unknown>");
2241 str_cst = asan_pp_string (&asan_pp);
2243 pp_string (&module_name_pp, main_input_filename);
2244 module_name_cst = asan_pp_string (&module_name_pp);
2246 if (asan_needs_local_alias (decl))
2248 char buf[20];
2249 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2250 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2251 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2252 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2253 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2254 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2255 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2256 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2257 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2258 TREE_STATIC (refdecl) = 1;
2259 TREE_PUBLIC (refdecl) = 0;
2260 TREE_USED (refdecl) = 1;
2261 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2264 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2265 fold_convert (const_ptr_type_node,
2266 build_fold_addr_expr (refdecl)));
2267 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2268 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2269 size += asan_red_zone_size (size);
2270 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2271 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2272 fold_convert (const_ptr_type_node, str_cst));
2273 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2274 fold_convert (const_ptr_type_node, module_name_cst));
2275 varpool_node *vnode = varpool_node::get (decl);
2276 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2277 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2278 build_int_cst (uptr, has_dynamic_init));
2279 tree locptr = NULL_TREE;
2280 location_t loc = DECL_SOURCE_LOCATION (decl);
2281 expanded_location xloc = expand_location (loc);
2282 if (xloc.file != NULL)
2284 static int lasanloccnt = 0;
2285 char buf[25];
2286 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2287 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2288 ubsan_get_source_location_type ());
2289 TREE_STATIC (var) = 1;
2290 TREE_PUBLIC (var) = 0;
2291 DECL_ARTIFICIAL (var) = 1;
2292 DECL_IGNORED_P (var) = 1;
2293 pretty_printer filename_pp;
2294 pp_string (&filename_pp, xloc.file);
2295 tree str = asan_pp_string (&filename_pp);
2296 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2297 NULL_TREE, str, NULL_TREE,
2298 build_int_cst (unsigned_type_node,
2299 xloc.line), NULL_TREE,
2300 build_int_cst (unsigned_type_node,
2301 xloc.column));
2302 TREE_CONSTANT (ctor) = 1;
2303 TREE_STATIC (ctor) = 1;
2304 DECL_INITIAL (var) = ctor;
2305 varpool_node::finalize_decl (var);
2306 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2308 else
2309 locptr = build_int_cst (uptr, 0);
2310 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2311 init = build_constructor (type, vinner);
2312 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
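/* [Editor's illustration, values hypothetical: for a 12-byte global

     int g[3];

   defined in file.c, the descriptor appended above is roughly

     { &g, 12, 12 + <redzone>, "g", "file.c", 0, &<.LASANLOC label> }

   where <redzone> is asan_red_zone_size (12) and the final field
   points to the source-location record built from
   DECL_SOURCE_LOCATION.]  */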
2315 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2316 void
2317 initialize_sanitizer_builtins (void)
2319 tree decl;
2321 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2322 return;
2324 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2325 tree BT_FN_VOID_PTR
2326 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2327 tree BT_FN_VOID_CONST_PTR
2328 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2329 tree BT_FN_VOID_PTR_PTR
2330 = build_function_type_list (void_type_node, ptr_type_node,
2331 ptr_type_node, NULL_TREE);
2332 tree BT_FN_VOID_PTR_PTR_PTR
2333 = build_function_type_list (void_type_node, ptr_type_node,
2334 ptr_type_node, ptr_type_node, NULL_TREE);
2335 tree BT_FN_VOID_PTR_PTRMODE
2336 = build_function_type_list (void_type_node, ptr_type_node,
2337 pointer_sized_int_node, NULL_TREE);
2338 tree BT_FN_VOID_INT
2339 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2340 tree BT_FN_SIZE_CONST_PTR_INT
2341 = build_function_type_list (size_type_node, const_ptr_type_node,
2342 integer_type_node, NULL_TREE);
2343 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2344 tree BT_FN_IX_CONST_VPTR_INT[5];
2345 tree BT_FN_IX_VPTR_IX_INT[5];
2346 tree BT_FN_VOID_VPTR_IX_INT[5];
2347 tree vptr
2348 = build_pointer_type (build_qualified_type (void_type_node,
2349 TYPE_QUAL_VOLATILE));
2350 tree cvptr
2351 = build_pointer_type (build_qualified_type (void_type_node,
2352 TYPE_QUAL_VOLATILE
2353 |TYPE_QUAL_CONST));
2354 tree boolt
2355 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2356 int i;
2357 for (i = 0; i < 5; i++)
2359 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2360 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2361 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2362 integer_type_node, integer_type_node,
2363 NULL_TREE);
2364 BT_FN_IX_CONST_VPTR_INT[i]
2365 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2366 BT_FN_IX_VPTR_IX_INT[i]
2367 = build_function_type_list (ix, vptr, ix, integer_type_node,
2368 NULL_TREE);
2369 BT_FN_VOID_VPTR_IX_INT[i]
2370 = build_function_type_list (void_type_node, vptr, ix,
2371 integer_type_node, NULL_TREE);
2373 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2374 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2375 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2376 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2377 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2378 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2379 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2380 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2381 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2382 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2383 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2384 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2385 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2386 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2387 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2388 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2389 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2390 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2391 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2392 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2393 #undef ATTR_NOTHROW_LEAF_LIST
2394 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2395 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2396 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2397 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2398 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2399 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2400 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2401 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2402 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2403 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2404 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2405 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2406 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2407 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2408 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2409 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2410 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2411 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2412 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2413 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2414 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2415 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2416 #undef DEF_SANITIZER_BUILTIN
2417 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2418 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2419 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2420 set_call_expr_flags (decl, ATTRS); \
2421 set_builtin_decl (ENUM, decl, true);
2423 #include "sanitizer.def"
2425 /* -fsanitize=object-size uses __builtin_object_size, but that might
2426 not be available for e.g. Fortran at this point. We use
2427 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2428 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2429 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2430 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2431 BT_FN_SIZE_CONST_PTR_INT,
2432 ATTR_PURE_NOTHROW_LEAF_LIST)
2434 #undef DEF_SANITIZER_BUILTIN
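/* [Editor's sketch: per the macro definition above, the
   BUILT_IN_OBJECT_SIZE invocation expands to

     decl = add_builtin_function ("__builtin_object_size",
                                  BT_FN_SIZE_CONST_PTR_INT,
                                  BUILT_IN_OBJECT_SIZE, BUILT_IN_NORMAL,
                                  "object_size", NULL_TREE);
     set_call_expr_flags (decl, ECF_PURE | ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_OBJECT_SIZE, decl, true);
   ]  */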
2437 /* Called via hash_table::traverse. Count the number of emitted
2438 STRING_CSTs in the constant hash table. */
2440 static int
2441 count_string_csts (constant_descriptor_tree **slot,
2442 unsigned HOST_WIDE_INT *data)
2444 struct constant_descriptor_tree *desc = *slot;
2445 if (TREE_CODE (desc->value) == STRING_CST
2446 && TREE_ASM_WRITTEN (desc->value)
2447 && asan_protect_global (desc->value))
2448 ++*data;
2449 return 1;
2452 /* Helper structure to pass two parameters to
2453 add_string_csts. */
2455 struct asan_add_string_csts_data
2457 tree type;
2458 vec<constructor_elt, va_gc> *v;
2461 /* Called via hash_table::traverse. Call asan_add_global
2462 on emitted STRING_CSTs from the constant hash table. */
2464 static int
2465 add_string_csts (constant_descriptor_tree **slot,
2466 asan_add_string_csts_data *aascd)
2468 struct constant_descriptor_tree *desc = *slot;
2469 if (TREE_CODE (desc->value) == STRING_CST
2470 && TREE_ASM_WRITTEN (desc->value)
2471 && asan_protect_global (desc->value))
2473 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2474 aascd->type, aascd->v);
2476 return 1;
2479 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2480 invoke ggc_collect. */
2481 static GTY(()) tree asan_ctor_statements;
2483 /* Module-level instrumentation.
2484 - Insert __asan_init_vN() into the list of CTORs.
2485 - TODO: insert redzones around globals.
2488 void
2489 asan_finish_file (void)
2491 varpool_node *vnode;
2492 unsigned HOST_WIDE_INT gcount = 0;
2494 if (shadow_ptr_types[0] == NULL_TREE)
2495 asan_init_shadow_ptr_types ();
2496 /* Avoid instrumenting code in the asan ctors/dtors.
2497 We don't need to insert padding after the description strings,
2498 nor after .LASAN* array. */
2499 flag_sanitize &= ~SANITIZE_ADDRESS;
2501 /* For user-space we want asan constructors to run first.
2502 The Linux kernel does not support priorities other than the default,
2503 and the only other user of constructors is coverage. So we run with
2504 the default priority there. */
2505 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2506 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2508 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2510 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2511 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2513 FOR_EACH_DEFINED_VARIABLE (vnode)
2514 if (TREE_ASM_WRITTEN (vnode->decl)
2515 && asan_protect_global (vnode->decl))
2516 ++gcount;
2517 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2518 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2519 (&gcount);
2520 if (gcount)
2522 tree type = asan_global_struct (), var, ctor;
2523 tree dtor_statements = NULL_TREE;
2524 vec<constructor_elt, va_gc> *v;
2525 char buf[20];
2527 type = build_array_type_nelts (type, gcount);
2528 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2529 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2530 type);
2531 TREE_STATIC (var) = 1;
2532 TREE_PUBLIC (var) = 0;
2533 DECL_ARTIFICIAL (var) = 1;
2534 DECL_IGNORED_P (var) = 1;
2535 vec_alloc (v, gcount);
2536 FOR_EACH_DEFINED_VARIABLE (vnode)
2537 if (TREE_ASM_WRITTEN (vnode->decl)
2538 && asan_protect_global (vnode->decl))
2539 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2540 struct asan_add_string_csts_data aascd;
2541 aascd.type = TREE_TYPE (type);
2542 aascd.v = v;
2543 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2544 (&aascd);
2545 ctor = build_constructor (type, v);
2546 TREE_CONSTANT (ctor) = 1;
2547 TREE_STATIC (ctor) = 1;
2548 DECL_INITIAL (var) = ctor;
2549 varpool_node::finalize_decl (var);
2551 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2552 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2553 append_to_statement_list (build_call_expr (fn, 2,
2554 build_fold_addr_expr (var),
2555 gcount_tree),
2556 &asan_ctor_statements);
2558 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2559 append_to_statement_list (build_call_expr (fn, 2,
2560 build_fold_addr_expr (var),
2561 gcount_tree),
2562 &dtor_statements);
2563 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2565 if (asan_ctor_statements)
2566 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2567 flag_sanitize |= SANITIZE_ADDRESS;
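/* [Editor's sketch of the emitted module constructor/destructor in
   pseudo-C; the ctor/dtor names are compiler-generated, and .LASAN0
   is the descriptor array VAR built above:

     static void ctor (void)   // MAX_RESERVED_INIT_PRIORITY - 1 for
     {                         // user-space ASan, default otherwise
       __asan_init ();         // only if SANITIZE_USER_ADDRESS
       __asan_register_globals (&.LASAN0, gcount);
     }
     static void dtor (void)
     {
       __asan_unregister_globals (&.LASAN0, gcount);
     }
   ]  */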
2570 /* Expand the IFN_ASAN_CHECK internal function. */
2572 bool
2573 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2575 gimple g = gsi_stmt (*iter);
2576 location_t loc = gimple_location (g);
2578 bool recover_p
2579 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2581 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2582 gcc_assert (flags < ASAN_CHECK_LAST);
2583 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2584 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2585 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2587 tree base = gimple_call_arg (g, 1);
2588 tree len = gimple_call_arg (g, 2);
2589 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2591 HOST_WIDE_INT size_in_bytes
2592 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2594 if (use_calls)
2596 /* Instrument using callbacks. */
2597 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2598 NOP_EXPR, base);
2599 gimple_set_location (g, loc);
2600 gsi_insert_before (iter, g, GSI_SAME_STMT);
2601 tree base_addr = gimple_assign_lhs (g);
2603 int nargs;
2604 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2605 if (nargs == 1)
2606 g = gimple_build_call (fun, 1, base_addr);
2607 else
2609 gcc_assert (nargs == 2);
2610 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2611 NOP_EXPR, len);
2612 gimple_set_location (g, loc);
2613 gsi_insert_before (iter, g, GSI_SAME_STMT);
2614 tree sz_arg = gimple_assign_lhs (g);
2615 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2617 gimple_set_location (g, loc);
2618 gsi_replace (iter, g, false);
2619 return false;
2622 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2624 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2625 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2627 gimple_stmt_iterator gsi = *iter;
2629 if (!is_non_zero_len)
2631 /* So, the length of the memory area to asan-protect is
2632 non-constant. Let's guard the generated instrumentation code
2633 like:
2635 if (len != 0)
2636 {
2637 // asan instrumentation code goes here.
2638 }
2639 // fallthrough instructions, starting with *ITER. */
2641 g = gimple_build_cond (NE_EXPR,
2642 len,
2643 build_int_cst (TREE_TYPE (len), 0),
2644 NULL_TREE, NULL_TREE);
2645 gimple_set_location (g, loc);
2647 basic_block then_bb, fallthrough_bb;
2648 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2649 /*then_more_likely_p=*/true,
2650 &then_bb, &fallthrough_bb);
2651 /* Note that fallthrough_bb starts with the statement that was
2652 pointed to by ITER. */
2654 /* The 'then block' of the 'if (len != 0)' condition is where
2655 we'll generate the asan instrumentation code now. */
2656 gsi = gsi_last_bb (then_bb);
2659 /* Get an iterator on the point where we can add the condition
2660 statement for the instrumentation. */
2661 basic_block then_bb, else_bb;
2662 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2663 /*then_more_likely_p=*/false,
2664 /*create_then_fallthru_edge*/recover_p,
2665 &then_bb,
2666 &else_bb);
2668 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2669 NOP_EXPR, base);
2670 gimple_set_location (g, loc);
2671 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2672 tree base_addr = gimple_assign_lhs (g);
2674 tree t = NULL_TREE;
2675 if (real_size_in_bytes >= 8)
2677 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2678 shadow_ptr_type);
2679 t = shadow;
2681 else
2683 /* Slow path for 1, 2 and 4 byte accesses. */
2684 /* Test ((shadow != 0)
2685 & (((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow)). */
2686 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2687 shadow_ptr_type);
2688 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2689 gimple_seq seq = NULL;
2690 gimple_seq_add_stmt (&seq, shadow_test);
2691 /* An access aligned to >= 8 bytes can test just
2692 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2693 to be 0. */
2694 if (align < 8)
2696 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2697 base_addr, 7));
2698 gimple_seq_add_stmt (&seq,
2699 build_type_cast (shadow_type,
2700 gimple_seq_last (seq)));
2701 if (real_size_in_bytes > 1)
2702 gimple_seq_add_stmt (&seq,
2703 build_assign (PLUS_EXPR,
2704 gimple_seq_last (seq),
2705 real_size_in_bytes - 1));
2706 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2708 else
2709 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2710 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2711 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2712 gimple_seq_last (seq)));
2713 t = gimple_assign_lhs (gimple_seq_last (seq));
2714 gimple_seq_set_location (seq, loc);
2715 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2717 /* For non-constant, misaligned or otherwise weird access sizes,
2718 check the first and the last byte. */
2719 if (size_in_bytes == -1)
2721 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2722 MINUS_EXPR, len,
2723 build_int_cst (pointer_sized_int_node, 1));
2724 gimple_set_location (g, loc);
2725 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2726 tree last = gimple_assign_lhs (g);
2727 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2728 PLUS_EXPR, base_addr, last);
2729 gimple_set_location (g, loc);
2730 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2731 tree base_end_addr = gimple_assign_lhs (g);
2733 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2734 shadow_ptr_type);
2735 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2736 gimple_seq seq = NULL;
2737 gimple_seq_add_stmt (&seq, shadow_test);
2738 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2739 base_end_addr, 7));
2740 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2741 gimple_seq_last (seq)));
2742 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2743 gimple_seq_last (seq),
2744 shadow));
2745 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2746 gimple_seq_last (seq)));
2747 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2748 gimple_seq_last (seq)));
2749 t = gimple_assign_lhs (gimple_seq_last (seq));
2750 gimple_seq_set_location (seq, loc);
2751 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2755 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2756 NULL_TREE, NULL_TREE);
2757 gimple_set_location (g, loc);
2758 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2760 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2761 gsi = gsi_start_bb (then_bb);
2762 int nargs;
2763 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2764 g = gimple_build_call (fun, nargs, base_addr, len);
2765 gimple_set_location (g, loc);
2766 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2768 gsi_remove (iter, true);
2769 *iter = gsi_start_bb (else_bb);
2771 return true;
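/* [Editor's sketch, plain C, not part of GCC: the inline test the code
   above expands IFN_ASAN_CHECK into for a constant-size access with
   alignment below 8 bytes.  SHADOW is the byte loaded by
   build_shadow_mem_access; when this returns nonzero, control reaches
   the __asan_report_* call in THEN_BB.]  */
static int
sketch_check_fails (unsigned long base_addr, signed char shadow, long size)
{
  if (size >= 8)
    return shadow != 0;     /* One shadow byte (or short) covers it.  */
  /* Slow path for 1-, 2- and 4-byte accesses: the granule is only
     partially addressable, so compare the last accessed offset within
     the 8-byte granule against the shadow value.  */
  return shadow != 0
	 && (signed char) ((base_addr & 7) + (size - 1)) >= shadow;
}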
2774 /* Instrument the current function. */
2776 static unsigned int
2777 asan_instrument (void)
2779 if (shadow_ptr_types[0] == NULL_TREE)
2780 asan_init_shadow_ptr_types ();
2781 transform_statements ();
2782 return 0;
2785 static bool
2786 gate_asan (void)
2788 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2789 && !lookup_attribute ("no_sanitize_address",
2790 DECL_ATTRIBUTES (current_function_decl));
2793 namespace {
2795 const pass_data pass_data_asan =
2797 GIMPLE_PASS, /* type */
2798 "asan", /* name */
2799 OPTGROUP_NONE, /* optinfo_flags */
2800 TV_NONE, /* tv_id */
2801 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2802 0, /* properties_provided */
2803 0, /* properties_destroyed */
2804 0, /* todo_flags_start */
2805 TODO_update_ssa, /* todo_flags_finish */
2808 class pass_asan : public gimple_opt_pass
2810 public:
2811 pass_asan (gcc::context *ctxt)
2812 : gimple_opt_pass (pass_data_asan, ctxt)
2815 /* opt_pass methods: */
2816 opt_pass * clone () { return new pass_asan (m_ctxt); }
2817 virtual bool gate (function *) { return gate_asan (); }
2818 virtual unsigned int execute (function *) { return asan_instrument (); }
2820 }; // class pass_asan
2822 } // anon namespace
2824 gimple_opt_pass *
2825 make_pass_asan (gcc::context *ctxt)
2827 return new pass_asan (ctxt);
2830 namespace {
2832 const pass_data pass_data_asan_O0 =
2834 GIMPLE_PASS, /* type */
2835 "asan0", /* name */
2836 OPTGROUP_NONE, /* optinfo_flags */
2837 TV_NONE, /* tv_id */
2838 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2839 0, /* properties_provided */
2840 0, /* properties_destroyed */
2841 0, /* todo_flags_start */
2842 TODO_update_ssa, /* todo_flags_finish */
2845 class pass_asan_O0 : public gimple_opt_pass
2847 public:
2848 pass_asan_O0 (gcc::context *ctxt)
2849 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2852 /* opt_pass methods: */
2853 virtual bool gate (function *) { return !optimize && gate_asan (); }
2854 virtual unsigned int execute (function *) { return asan_instrument (); }
2856 }; // class pass_asan_O0
2858 } // anon namespace
2860 gimple_opt_pass *
2861 make_pass_asan_O0 (gcc::context *ctxt)
2863 return new pass_asan_O0 (ctxt);
2866 #include "gt-asan.h"