[PATCH] Fix undefined behaviour in rl78 port
[official-gcc.git] / gcc / asan.c
blob05c4ad4739a2b16272e6acb6e8376ad216f49d73
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "alias.h"
26 #include "backend.h"
27 #include "cfghooks.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "rtl.h"
31 #include "options.h"
32 #include "fold-const.h"
33 #include "cfganal.h"
34 #include "internal-fn.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "calls.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "asan.h"
46 #include "gimple-pretty-print.h"
47 #include "target.h"
48 #include "flags.h"
49 #include "insn-config.h"
50 #include "expmed.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "emit-rtl.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "output.h"
59 #include "tm_p.h"
60 #include "langhooks.h"
61 #include "alloc-pool.h"
62 #include "cfgloop.h"
63 #include "gimple-builder.h"
64 #include "ubsan.h"
65 #include "params.h"
66 #include "builtins.h"
67 #include "fnmatch.h"
69 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 with <2x slowdown on average.
72 The tool consists of two parts:
73 instrumentation module (this file) and a run-time library.
74 The instrumentation module adds a run-time check before every memory insn.
75 For a 8- or 16- byte load accessing address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 if (ShadowValue)
79 __asan_report_load8(X);
80 For a load of N bytes (N=1, 2 or 4) from address X:
81 ShadowAddr = (X >> 3) + Offset
82 ShadowValue = *(char*)ShadowAddr;
83 if (ShadowValue)
84 if ((X & 7) + N - 1 > ShadowValue)
85 __asan_report_loadN(X);
86 Stores are instrumented similarly, but using __asan_report_storeN functions.
87 A call to __asan_init_vN() is inserted into the list of module CTORs.
88 N is the version number of the AddressSanitizer API. The changes between the
89 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
91 The run-time library redefines malloc (so that redzones are inserted around
92 the allocated memory) and free (so that reuse of free-ed memory is delayed),
93 provides __asan_report* and __asan_init_vN functions.
95 Read more:
96 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
98 The current implementation supports detection of out-of-bounds and
99 use-after-free in the heap, on the stack and for global variables.
101 [Protection of stack variables]
103 To understand how detection of out-of-bounds and use-after-free works
104 for stack variables, lets look at this example on x86_64 where the
105 stack grows downward:
108 foo ()
110 char a[23] = {0};
111 int b[2] = {0};
113 a[5] = 1;
114 b[1] = 2;
116 return a[5] + b[1];
119 For this function, the stack protected by asan will be organized as
120 follows, from the top of the stack to the bottom:
122 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
124 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
125 the next slot be 32 bytes aligned; this one is called Partial
126 Redzone; this 32 bytes alignment is an asan constraint]
128 Slot 3/ [24 bytes for variable 'a']
130 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
132 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
134 Slot 6/ [8 bytes for variable 'b']
136 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 'LEFT RedZone']
139 The 32 bytes of LEFT red zone at the bottom of the stack can be
140 decomposed as such:
142 1/ The first 8 bytes contain a magical asan number that is always
143 0x41B58AB3.
145 2/ The following 8 bytes contains a pointer to a string (to be
146 parsed at runtime by the runtime asan library), which format is
147 the following:
149 "<function-name> <space> <num-of-variables-on-the-stack>
150 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 <length-of-var-in-bytes> ){n} "
153 where '(...){n}' means the content inside the parenthesis occurs 'n'
154 times, with 'n' being the number of variables on the stack.
156 3/ The following 8 bytes contain the PC of the current function which
157 will be used by the run-time library to print an error message.
159 4/ The following 8 bytes are reserved for internal use by the run-time.
161 The shadow memory for that stack layout is going to look like this:
163 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 The F1 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166 the memory for that shadow byte is part of a the LEFT red zone
167 intended to seat at the bottom of the variables on the stack.
169 - content of shadow memory 8 bytes for slots 6 and 5:
170 0xF4F4F400. The F4 byte pattern is a magic number
171 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 memory region for this shadow byte is a PARTIAL red zone
173 intended to pad a variable A, so that the slot following
174 {A,padding} is 32 bytes aligned.
176 Note that the fact that the least significant byte of this
177 shadow memory content is 00 means that 8 bytes of its
178 corresponding memory (which corresponds to the memory of
179 variable 'b') is addressable.
181 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 The F2 byte pattern is a magic number called
183 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 region for this shadow byte is a MIDDLE red zone intended to
185 seat between two 32 aligned slots of {variable,padding}.
187 - content of shadow memory 8 bytes for slot 3 and 2:
188 0xF4000000. This represents the concatenation of
189 variable 'a' and the partial red zone following it, like what we
190 had for variable 'b'. The least significant 3 bytes being 00
191 means that the 3 bytes of variable 'a' are addressable.
193 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 The F3 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 region for this shadow byte is a RIGHT red zone intended to seat
197 at the top of the variables of the stack.
199 Note that the real variable layout is done in expand_used_vars in
200 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
201 stack variables as well as the different red zones, emits some
202 prologue code to populate the shadow memory as to poison (mark as
203 non-accessible) the regions of the red zones and mark the regions of
204 stack variables as accessible, and emit some epilogue code to
205 un-poison (mark as accessible) the regions of red zones right before
206 the function exits.
208 [Protection of global variables]
210 The basic idea is to insert a red zone between two global variables
211 and install a constructor function that calls the asan runtime to do
212 the populating of the relevant shadow memory regions at load time.
214 So the global variables are laid out as to insert a red zone between
215 them. The size of the red zones is so that each variable starts on a
216 32 bytes boundary.
218 Then a constructor function is installed so that, for each global
219 variable, it calls the runtime asan library function
220 __asan_register_globals with an instance of this type:
222 struct __asan_global
224 // Address of the beginning of the global variable.
225 const void *__beg;
227 // Initial size of the global variable.
228 uptr __size;
230 // Size of the global variable + size of the red zone. This
231 // size is 32 bytes aligned.
232 uptr __size_with_redzone;
234 // Name of the global variable.
235 const void *__name;
237 // Name of the module where the global variable is declared.
238 const void *__module_name;
240 // 1 if it has dynamic initialization, 0 otherwise.
241 uptr __has_dynamic_init;
243 // A pointer to struct that contains source location, could be NULL.
244 __asan_global_source_location *__location;
247 A destructor function that calls the runtime asan library function
248 __asan_unregister_globals is also installed. */
/* Shadow memory offset: either supplied by the user via
   -fasan-shadow-offset= (see set_asan_shadow_offset) or computed
   lazily from the target hook (see asan_shadow_offset).  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;

/* True once asan_shadow_offset_value holds a valid value.  */
static bool asan_shadow_offset_computed;

/* Section name patterns registered by set_sanitized_sections; each
   element is an xstrndup'd string owned by this vector.  */
static vec<char *> sanitized_sections;
254 /* Sets shadow offset to value in string VAL. */
256 bool
257 set_asan_shadow_offset (const char *val)
259 char *endp;
261 errno = 0;
262 #ifdef HAVE_LONG_LONG
263 asan_shadow_offset_value = strtoull (val, &endp, 0);
264 #else
265 asan_shadow_offset_value = strtoul (val, &endp, 0);
266 #endif
267 if (!(*val != '\0' && *endp == '\0' && errno == 0))
268 return false;
270 asan_shadow_offset_computed = true;
272 return true;
275 /* Set list of user-defined sections that need to be sanitized. */
277 void
278 set_sanitized_sections (const char *sections)
280 char *pat;
281 unsigned i;
282 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
283 free (pat);
284 sanitized_sections.truncate (0);
286 for (const char *s = sections; *s; )
288 const char *end;
289 for (end = s; *end && *end != ','; ++end);
290 size_t len = end - s;
291 sanitized_sections.safe_push (xstrndup (s, len));
292 s = *end ? end + 1 : end;
296 /* Checks whether section SEC should be sanitized. */
298 static bool
299 section_sanitized_p (const char *sec)
301 char *pat;
302 unsigned i;
303 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
304 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
305 return true;
306 return false;
309 /* Returns Asan shadow offset. */
311 static unsigned HOST_WIDE_INT
312 asan_shadow_offset ()
314 if (!asan_shadow_offset_computed)
316 asan_shadow_offset_computed = true;
317 asan_shadow_offset_value = targetm.asan_shadow_offset ();
319 return asan_shadow_offset_value;
/* Alias set shared by every shadow memory access the pass emits.  */
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;
/* Various flags for Asan builtins, combined into the flags argument of
   the internal ASAN_CHECK function.  */
enum asan_check_flags
{
  /* The access is a write (store) rather than a read (load).  */
  ASAN_CHECK_STORE = 1 << 0,
  /* The access is a scalar memory access of known size.  */
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  /* The length operand is known to be non-zero.  */
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  /* Sentinel: one past the last meaningful flag bit.  */
  ASAN_CHECK_LAST = 1 << 3
};
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

/* Pool from which all asan_mem_ref instances are allocated; released
   in free_mem_ref_resources.  */
object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
355 /* Initializes an instance of asan_mem_ref. */
357 static void
358 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
360 ref->start = start;
361 ref->access_size = access_size;
364 /* Allocates memory for an instance of asan_mem_ref into the memory
365 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
366 START is the address of (or the expression pointing to) the
367 beginning of memory reference. ACCESS_SIZE is the size of the
368 access to the referenced memory. */
370 static asan_mem_ref*
371 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
373 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
375 asan_mem_ref_init (ref, start, access_size);
376 return ref;
379 /* This builds and returns a pointer to the end of the memory region
380 that starts at START and of length LEN. */
382 tree
383 asan_mem_ref_get_end (tree start, tree len)
385 if (len == NULL_TREE || integer_zerop (len))
386 return start;
388 if (!ptrofftype_p (len))
389 len = convert_to_ptrofftype (len);
391 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
/* Hash traits for asan_mem_ref: entries are keyed on the START
   expression only (see hash/equal below).  */
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory references to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

/* The hash table of instrumented memory references; created lazily by
   get_mem_ref_hash_table.  */
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
430 /* Returns a reference to the hash table containing memory references.
431 This function ensures that the hash table is created. Note that
432 this hash table is updated by the function
433 update_mem_ref_hash_table. */
435 static hash_table<asan_mem_ref_hasher> *
436 get_mem_ref_hash_table ()
438 if (!asan_mem_ref_ht)
439 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
441 return asan_mem_ref_ht;
444 /* Clear all entries from the memory references hash table. */
446 static void
447 empty_mem_ref_hash_table ()
449 if (asan_mem_ref_ht)
450 asan_mem_ref_ht->empty ();
453 /* Free the memory references hash table. */
455 static void
456 free_mem_ref_resources ()
458 delete asan_mem_ref_ht;
459 asan_mem_ref_ht = NULL;
461 asan_mem_ref_pool.release ();
464 /* Return true iff the memory reference REF has been instrumented. */
466 static bool
467 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
469 asan_mem_ref r;
470 asan_mem_ref_init (&r, ref, access_size);
472 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
473 return saved_ref && saved_ref->access_size >= access_size;
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
484 /* Return true iff access to memory region starting at REF and of
485 length LEN has been instrumented. */
487 static bool
488 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
490 HOST_WIDE_INT size_in_bytes
491 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
493 return size_in_bytes != -1
494 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
497 /* Set REF to the memory reference present in a gimple assignment
498 ASSIGNMENT. Return true upon successful completion, false
499 otherwise. */
501 static bool
502 get_mem_ref_of_assignment (const gassign *assignment,
503 asan_mem_ref *ref,
504 bool *ref_is_store)
506 gcc_assert (gimple_assign_single_p (assignment));
508 if (gimple_store_p (assignment)
509 && !gimple_clobber_p (assignment))
511 ref->start = gimple_assign_lhs (assignment);
512 *ref_is_store = true;
514 else if (gimple_assign_load_p (assignment))
516 ref->start = gimple_assign_rhs1 (assignment);
517 *ref_is_store = false;
519 else
520 return false;
522 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
523 return true;
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.
   On success fills in up to two source references (SRC0/SRC1, with
   their lengths and store flags), one destination reference (DST),
   and sets *DEST_IS_DEREF when DST is a direct dereference rather
   than a region of *DST_LEN bytes.  *INTERCEPTED_P is set to whether
   the builtin is intercepted by libasan.  Returns true iff any
   reference was extracted.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      /* For strlen the "length" is the result of the call itself.  */
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so lets
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	if (TREE_CODE (dest) == ADDR_EXPR)
	  dest = TREE_OPERAND (dest, 0);
	else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	  dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
			 dest, build_int_cst (TREE_TYPE (dest), 0));
	else
	  gcc_unreachable ();

	access_size = int_size_in_bytes (TREE_TYPE (dest));
      }
      /* NOTE(review): there is no `break' here — control falls through
	 into the default case, which only breaks, so this is harmless
	 today, but a statement added to `default' would run for the
	 atomic/sync cases too.  */

    default:
      /* The other builtins memory access are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      /* No length: DEST is a plain dereference of ACCESS_SIZE bytes.  */
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  /* The builtin is considered instrumented only if every
	     reference it makes has been instrumented.  */
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
911 /* Insert a memory reference into the hash table. */
913 static void
914 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
916 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
918 asan_mem_ref r;
919 asan_mem_ref_init (&r, ref, access_size);
921 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
922 if (*slot == NULL || (*slot)->access_size < access_size)
923 *slot = asan_mem_ref_new (ref, access_size);
/* Initialize shadow_ptr_types array: pointer-to-signed-char and
   pointer-to-short types whose pointees live in the dedicated
   asan_shadow_set alias set, then set up the sanitizer builtins.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  /* Distinct type copies are made so the alias set can be changed
     without affecting the ordinary char/short types.  */
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
941 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
943 static tree
944 asan_pp_string (pretty_printer *pp)
946 const char *buf = pp_formatted_text (pp);
947 size_t len = strlen (buf);
948 tree ret = build_string (len + 1, buf);
949 TREE_TYPE (ret)
950 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
951 build_index_type (size_int (len)));
952 TREE_READONLY (ret) = 1;
953 TREE_STATIC (ret) = 1;
954 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
957 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
959 static rtx
960 asan_shadow_cst (unsigned char shadow_bytes[4])
962 int i;
963 unsigned HOST_WIDE_INT val = 0;
964 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
965 for (i = 0; i < 4; i++)
966 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
967 << (BITS_PER_UNIT * i);
968 return gen_int_mode (val, SImode);
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library
   call here though, so if clear_storage would emit one, fall back to
   an inline word-at-a-time loop.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  /* Let clear_storage generate its preferred sequence, but only keep
     it if it contains no library call.  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  /* Fallback: a loop storing const0_rtx 4 bytes (SImode) at a time,
     so LEN must be a multiple of 4.  */
  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Mark the backwards branch as highly likely (80%) taken.  */
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Emit the LASANPC debug label at the start of the current function;
   the runtime uses its address when reporting stack errors.  */
void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, against which OFFSETS array offsets are relative to, OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
   elements long (OFFSETS include gap before the first variable as well
   as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
   register which stack vars DECL_RTLs are based on.  Either BASE should be
   assigned to PBASE, when not doing use after return protection, or
   corresponding address based on __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  /* OFFSETS is sorted descending; the last entry is the lowest (frame
     start) and the first the highest (frame end), so their difference
     is the full protected frame size.  */
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  Format:
     "<nvars> (<offset> <size> <name-len> <name> )*" — consumed by
     libasan when printing stack frame layout in reports.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  Use-after-return detection is only
     attempted for moderately sized frames (32, 65536] when a PBASE to
     redirect is available.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      /* Lazily create the decl for the runtime flag that tells us at
	 run time whether fake-stack (use-after-return) mode is on.  */
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      /* If the runtime flag is zero, skip the fake-stack allocation and
	 keep using the real stack frame.  */
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  /* Write the frame header: magic word, pointer to the description
     string, and pointer to the per-function LASANPC label.  */
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  /* Compute the shadow address for BASE: (base >> SHIFT) + offset.  */
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  /* Poison the red zones: walk the offset pairs high-to-low, emitting
     4-byte shadow stores (one red zone = 4 shadow bytes).  */
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  /* Variable end is not red-zone aligned: emit one mixed shadow
	     word encoding the partially-addressable granule(s).  */
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      /* BASE == ORIG_BASE means the fake stack was not used; take the
	 ordinary unpoison path below.  */
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      /* Mark the whole shadow of the fake frame use-after-return,
	 inline if possible, otherwise via __asan_stack_free_N.  */
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  /* Unpoison shadow memory of the real frame, coalescing adjacent
     red zones into a single asan_clear_shadow call where possible.  */
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
1315 /* Return true if DECL, a global var, might be overridden and needs
1316 therefore a local alias. */
1318 static bool
1319 asan_needs_local_alias (tree decl)
1321 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      /* No RTL yet means the decl was never emitted; nothing to pad.  */
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      /* The red zone itself must be representable in object alignment.  */
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      /* Over-aligned vars would need over-sized red zones.  */
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      /* ubsan's source-location records are read by its runtime;
	 padding them would confuse it.  */
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  /* Constant-pool entries are laid out back to back; padding would
     break the pool's addressing.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  /* Without .set support we cannot create the local alias an
     interposable global would require.  */
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
1395 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1396 IS_STORE is either 1 (for a store) or 0 (for a load). */
1398 static tree
1399 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1400 int *nargs)
1402 static enum built_in_function report[2][2][6]
1403 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1404 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1405 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1406 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1407 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1408 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1409 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1410 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1411 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1412 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1413 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1414 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1415 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1416 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1417 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1418 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1419 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1420 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1421 if (size_in_bytes == -1)
1423 *nargs = 2;
1424 return builtin_decl_implicit (report[recover_p][is_store][5]);
1426 *nargs = 1;
1427 int size_log2 = exact_log2 (size_in_bytes);
1428 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1431 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1432 IS_STORE is either 1 (for a store) or 0 (for a load). */
1434 static tree
1435 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1436 int *nargs)
1438 static enum built_in_function check[2][2][6]
1439 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1440 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1441 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1442 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1443 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1444 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1445 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1446 BUILT_IN_ASAN_LOAD2_NOABORT,
1447 BUILT_IN_ASAN_LOAD4_NOABORT,
1448 BUILT_IN_ASAN_LOAD8_NOABORT,
1449 BUILT_IN_ASAN_LOAD16_NOABORT,
1450 BUILT_IN_ASAN_LOADN_NOABORT },
1451 { BUILT_IN_ASAN_STORE1_NOABORT,
1452 BUILT_IN_ASAN_STORE2_NOABORT,
1453 BUILT_IN_ASAN_STORE4_NOABORT,
1454 BUILT_IN_ASAN_STORE8_NOABORT,
1455 BUILT_IN_ASAN_STORE16_NOABORT,
1456 BUILT_IN_ASAN_STOREN_NOABORT } } };
1457 if (size_in_bytes == -1)
1459 *nargs = 2;
1460 return builtin_decl_implicit (check[recover_p][is_store][5]);
1462 *nargs = 1;
1463 int size_log2 = exact_log2 (size_in_bytes);
1464 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to point to always point to the first statement
   of the basic block * FALLTHROUGH_BLOCK.  That statement is the
   same as what ITER was pointing to prior to calling this function,
   if BEFORE_P is true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  /* When inserting before ITER, split after the previous statement so
     that the statement at ITER lands in the fallthrough block.  */
  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_bock.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
1549 /* Insert an if condition followed by a 'then block' right before the
1550 statement pointed to by ITER. The fallthrough block -- which is the
1551 else block of the condition as well as the destination of the
1552 outcoming edge of the 'then block' -- starts with the statement
1553 pointed to by ITER.
1555 COND is the condition of the if.
1557 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1558 'then block' is higher than the probability of the edge to the
1559 fallthrough block.
1561 Upon completion of the function, *THEN_BB is set to the newly
1562 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1563 fallthrough block.
1565 *ITER is adjusted to still point to the same statement it was
1566 pointing to initially. */
1568 static void
1569 insert_if_then_before_iter (gcond *cond,
1570 gimple_stmt_iterator *iter,
1571 bool then_more_likely_p,
1572 basic_block *then_bb,
1573 basic_block *fallthrough_bb)
1575 gimple_stmt_iterator cond_insert_point =
1576 create_cond_insert_point (iter,
1577 /*before_p=*/true,
1578 then_more_likely_p,
1579 /*create_then_fallthru_edge=*/true,
1580 then_bb,
1581 fallthrough_bb);
1582 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1585 /* Build
1586 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1588 static tree
1589 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1590 tree base_addr, tree shadow_ptr_type)
1592 tree t, uintptr_type = TREE_TYPE (base_addr);
1593 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1594 gimple *g;
1596 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1597 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1598 base_addr, t);
1599 gimple_set_location (g, location);
1600 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1602 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1603 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1604 gimple_assign_lhs (g), t);
1605 gimple_set_location (g, location);
1606 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1608 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1609 gimple_assign_lhs (g));
1610 gimple_set_location (g, location);
1611 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1613 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1614 build_int_cst (shadow_ptr_type, 0));
1615 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1616 gimple_set_location (g, location);
1617 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1618 return gimple_assign_lhs (g);
1621 /* BASE can already be an SSA_NAME; in that case, do not create a
1622 new SSA_NAME for it. */
1624 static tree
1625 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1626 bool before_p)
1628 if (TREE_CODE (base) == SSA_NAME)
1629 return base;
1630 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1631 TREE_CODE (base), base);
1632 gimple_set_location (g, loc);
1633 if (before_p)
1634 gsi_insert_before (iter, g, GSI_SAME_STMT);
1635 else
1636 gsi_insert_after (iter, g, GSI_NEW_STMT);
1637 return gimple_assign_lhs (g);
1640 /* LEN can already have necessary size and precision;
1641 in that case, do not create a new variable. */
1643 tree
1644 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1645 bool before_p)
1647 if (ptrofftype_p (len))
1648 return len;
1649 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1650 NOP_EXPR, len);
1651 gimple_set_location (g, loc);
1652 if (before_p)
1653 gsi_insert_before (iter, g, GSI_SAME_STMT);
1654 else
1655 gsi_insert_after (iter, g, GSI_NEW_STMT);
1656 return gimple_assign_lhs (g);
1659 /* Instrument the memory access instruction BASE. Insert new
1660 statements before or after ITER.
1662 Note that the memory access represented by BASE can be either an
1663 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1664 location. IS_STORE is TRUE for a store, FALSE for a load.
1665 BEFORE_P is TRUE for inserting the instrumentation code before
1666 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1667 for a scalar memory access and FALSE for memory region access.
1668 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1669 length. ALIGN tells alignment of accessed memory object.
1671 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1672 memory region have already been instrumented.
1674 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1675 statement it was pointing to prior to calling this function,
1676 otherwise, it points to the statement logically following it. */
1678 static void
1679 build_check_stmt (location_t loc, tree base, tree len,
1680 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1681 bool is_non_zero_len, bool before_p, bool is_store,
1682 bool is_scalar_access, unsigned int align = 0)
1684 gimple_stmt_iterator gsi = *iter;
1685 gimple *g;
1687 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1689 gsi = *iter;
1691 base = unshare_expr (base);
1692 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1694 if (len)
1696 len = unshare_expr (len);
1697 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1699 else
1701 gcc_assert (size_in_bytes != -1);
1702 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1705 if (size_in_bytes > 1)
1707 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1708 || size_in_bytes > 16)
1709 is_scalar_access = false;
1710 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1712 /* On non-strict alignment targets, if
1713 16-byte access is just 8-byte aligned,
1714 this will result in misaligned shadow
1715 memory 2 byte load, but otherwise can
1716 be handled using one read. */
1717 if (size_in_bytes != 16
1718 || STRICT_ALIGNMENT
1719 || align < 8 * BITS_PER_UNIT)
1720 is_scalar_access = false;
1724 HOST_WIDE_INT flags = 0;
1725 if (is_store)
1726 flags |= ASAN_CHECK_STORE;
1727 if (is_non_zero_len)
1728 flags |= ASAN_CHECK_NON_ZERO_LEN;
1729 if (is_scalar_access)
1730 flags |= ASAN_CHECK_SCALAR_ACCESS;
1732 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1733 build_int_cst (integer_type_node, flags),
1734 base, len,
1735 build_int_cst (integer_type_node,
1736 align / BITS_PER_UNIT));
1737 gimple_set_location (g, loc);
1738 if (before_p)
1739 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1740 else
1742 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1743 gsi_next (&gsi);
1744 *iter = gsi;
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Respect the per-direction instrumentation switches.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  /* Only these tree codes denote memory accesses we know how to
     instrument; anything else is left alone.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);

  /* For a bit-field, instrument the whole representative field
     instead — the underlying bytes that are actually accessed.  */
  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  /* Give up on accesses that are not byte-aligned or whose bit size
     disagrees with the type size.  */
  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  /* Accesses provably inside a known decl may be skippable.  */
  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
        return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      /* Remember both forms so redundant checks in this BB are elided.  */
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
1847 /* Insert a memory reference into the hash table if access length
1848 can be determined in compile time. */
1850 static void
1851 maybe_update_mem_ref_hash_table (tree base, tree len)
1853 if (!POINTER_TYPE_P (TREE_TYPE (base))
1854 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1855 return;
1857 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1859 if (size_in_bytes != -1)
1860 update_mem_ref_hash_table (base, size_in_bytes);
1863 /* Instrument an access to a contiguous memory region that starts at
1864 the address pointed to by BASE, over a length of LEN (expressed in
1865 the sizeof (*BASE) bytes). ITER points to the instruction before
1866 which the instrumentation instructions must be inserted. LOCATION
1867 is the source location that the instrumentation instructions must
1868 have. If IS_STORE is true, then the memory access is a store;
1869 otherwise, it's a load. */
1871 static void
1872 instrument_mem_region_access (tree base, tree len,
1873 gimple_stmt_iterator *iter,
1874 location_t location, bool is_store)
1876 if (!POINTER_TYPE_P (TREE_TYPE (base))
1877 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1878 || integer_zerop (len))
1879 return;
1881 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1883 if ((size_in_bytes == -1)
1884 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1886 build_check_stmt (location, base, len, size_in_bytes, iter,
1887 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1888 is_store, /*is_scalar_access*/false, /*align*/0);
1891 maybe_update_mem_ref_hash_table (base, len);
1892 *iter = gsi_for_stmt (gsi_stmt (*iter));
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  /* Up to three memory regions may be touched: two sources and one
     destination (e.g. memcpy's src/dest).  */
  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  /* Decode which regions this builtin reads/writes.  */
  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  /* Single scalar dereference (e.g. an atomic builtin).  */
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  /* Not intercepted by libasan: instrument each region here.  */
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  /* Intercepted by the runtime: no checks needed, but remember
	     the regions so later identical accesses are not re-checked.  */
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
1965 /* Instrument the assignment statement ITER if it is subject to
1966 instrumentation. Return TRUE iff instrumentation actually
1967 happened. In that case, the iterator ITER is advanced to the next
1968 logical expression following the one initially pointed to by ITER,
1969 and the relevant memory reference that which access has been
1970 instrumented is added to the memory references hash table. */
1972 static bool
1973 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1975 gimple *s = gsi_stmt (*iter);
1977 gcc_assert (gimple_assign_single_p (s));
1979 tree ref_expr = NULL_TREE;
1980 bool is_store, is_instrumented = false;
1982 if (gimple_store_p (s))
1984 ref_expr = gimple_assign_lhs (s);
1985 is_store = true;
1986 instrument_derefs (iter, ref_expr,
1987 gimple_location (s),
1988 is_store);
1989 is_instrumented = true;
1992 if (gimple_assign_load_p (s))
1994 ref_expr = gimple_assign_rhs1 (s);
1995 is_store = false;
1996 instrument_derefs (iter, ref_expr,
1997 gimple_location (s),
1998 is_store);
1999 is_instrumented = true;
2002 if (is_instrumented)
2003 gsi_next (iter);
2005 return is_instrumented;
2008 /* Instrument the function call pointed to by the iterator ITER, if it
2009 is subject to instrumentation. At the moment, the only function
2010 calls that are instrumented are some built-in functions that access
2011 memory. Look at instrument_builtin_call to learn more.
2013 Upon completion return TRUE iff *ITER was advanced to the statement
2014 following the one it was originally pointing to. */
2016 static bool
2017 maybe_instrument_call (gimple_stmt_iterator *iter)
2019 gimple *stmt = gsi_stmt (*iter);
2020 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2022 if (is_builtin && instrument_builtin_call (iter))
2023 return true;
2025 if (gimple_call_noreturn_p (stmt))
2027 if (is_builtin)
2029 tree callee = gimple_call_fndecl (stmt);
2030 switch (DECL_FUNCTION_CODE (callee))
2032 case BUILT_IN_UNREACHABLE:
2033 case BUILT_IN_TRAP:
2034 /* Don't instrument these. */
2035 return false;
2036 default:
2037 break;
2040 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2041 gimple *g = gimple_build_call (decl, 0);
2042 gimple_set_location (g, gimple_location (stmt));
2043 gsi_insert_before (iter, g, GSI_SAME_STMT);
2045 return false;
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Snapshot the bb count before we start; blocks created by the
     instrumentation itself get higher indices and must be skipped.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      /* Skip basic blocks created by earlier iterations of this walk.  */
      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}

      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /*  Nothing to do as maybe_instrument_assignment advanced
		the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /*  Nothing to do as maybe_instrument_call
		advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
2113 /* Build
2114 __asan_before_dynamic_init (module_name)
2116 __asan_after_dynamic_init ()
2117 call. */
2119 tree
2120 asan_dynamic_init_call (bool after_p)
2122 tree fn = builtin_decl_implicit (after_p
2123 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2124 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2125 tree module_name_cst = NULL_TREE;
2126 if (!after_p)
2128 pretty_printer module_name_pp;
2129 pp_string (&module_name_pp, main_input_filename);
2131 if (shadow_ptr_types[0] == NULL_TREE)
2132 asan_init_shadow_ptr_types ();
2133 module_name_cst = asan_pp_string (&module_name_pp);
2134 module_name_cst = fold_convert (const_ptr_type_node,
2135 module_name_cst);
2138 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2141 /* Build
2142 struct __asan_global
2144 const void *__beg;
2145 uptr __size;
2146 uptr __size_with_redzone;
2147 const void *__name;
2148 const void *__module_name;
2149 uptr __has_dynamic_init;
2150 __asan_global_source_location *__location;
2151 } type. */
2153 static tree
2154 asan_global_struct (void)
2156 static const char *field_names[7]
2157 = { "__beg", "__size", "__size_with_redzone",
2158 "__name", "__module_name", "__has_dynamic_init", "__location"};
2159 tree fields[7], ret;
2160 int i;
2162 ret = make_node (RECORD_TYPE);
2163 for (i = 0; i < 7; i++)
2165 fields[i]
2166 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2167 get_identifier (field_names[i]),
2168 (i == 0 || i == 3) ? const_ptr_type_node
2169 : pointer_sized_int_node);
2170 DECL_CONTEXT (fields[i]) = ret;
2171 if (i)
2172 DECL_CHAIN (fields[i - 1]) = fields[i];
2174 tree type_decl = build_decl (input_location, TYPE_DECL,
2175 get_identifier ("__asan_global"), ret);
2176 DECL_IGNORED_P (type_decl) = 1;
2177 DECL_ARTIFICIAL (type_decl) = 1;
2178 TYPE_FIELDS (ret) = fields[0];
2179 TYPE_NAME (ret) = type_decl;
2180 TYPE_STUB_DECL (ret) = type_decl;
2181 layout_type (ret);
2182 return ret;
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* UPTR is the type of the second field (__size), i.e. the
     pointer-sized integer type used for all the integer fields.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* __name: the (source-level) identifier of the global, or a
     placeholder when it has none.  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  /* __module_name: the main input file of this translation unit.  */
  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  /* When the global cannot be referred to directly (see
     asan_needs_local_alias), emit a local .LASANnnn alias for it and
     describe that alias instead.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  /* Build the __asan_global initializer field by field, in the order
     declared by asan_global_struct.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __size_with_redzone includes the trailing red zone.  */
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  /* __location: emit a static __asan_global_source_location record
     (file/line/column) and point at it, or 0 when no location is
     available.  */
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  /* BUILT_IN_ASAN_INIT is used as a proxy: if it is set, the front
     end has already created all the sanitizer builtins.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function types referenced by the DEF_SANITIZER_BUILTIN
     expansions of sanitizer.def below.  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  /* Type families indexed by access size: 1, 2, 4, 8, 16 bytes.  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      /* IX is the unsigned integer type of (1 << i) bytes.  */
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
/* Fixed-size aliases for the array entries above, named the way
   sanitizer.def expects.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
/* Map the builtins.def-style attribute lists used by sanitizer.def
   onto ECF_* flags accepted by set_call_expr_flags.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
}
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
		   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  /* Only emitted string constants that asan protects contribute to
     the count accumulated in *DATA.  */
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  /* Always return 1 so the traversal continues over all slots.  */
  return 1;
}
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  /* The __asan_global struct type (see asan_global_struct).  */
  tree type;
  /* Vector of __asan_global descriptors being accumulated.  */
  vec<constructor_elt, va_gc> *v;
};
/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
		 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      /* Describe the string through the VAR_DECL backing its
	 SYMBOL_REF.  */
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  /* Always return 1 so the traversal continues over all slots.  */
  return 1;
}
/* Statement list accumulated for the module constructor emitted by
   asan_finish_file.  Needs to be GTY(()), because
   cgraph_build_static_cdtor may invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage. So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  /* Count the protected globals: defined variables plus emitted
     string constants (counted via the hash-table traversal).  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Emit a static .LASAN0 array of __asan_global descriptors and
	 register/unregister it from a static ctor/dtor pair.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  /* Re-enable asan instrumentation that was disabled above.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);

  /* Recovery (continue-after-error) is only considered for the
     kernel address sanitizer here.  */
  bool recover_p
    = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  /* Arg 0 packs the ASAN_CHECK_* flags; see asan.h.  */
  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  /* -1 means a non-constant (or non-scalar) access length.  */
  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  /* 16-byte accesses use the second shadow pointer type.  */
  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     //asan instrumentation code goes here.
	   }
	 // falltrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0) condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  /* T accumulates the "is this access bad" predicate value.  */
  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      /* For >= 8 byte accesses a non-zero shadow byte means the
	 whole granule is unaddressable, so the shadow value itself
	 is the predicate.  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) access can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  /* OR the last-byte check into the first-byte predicate.  */
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  /* The shadow pointer types are created lazily on first use.  */
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  /* No extra TODO flags needed beyond the pass descriptor's.  */
  return 0;
}
2746 static bool
2747 gate_asan (void)
2749 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2750 && !lookup_attribute ("no_sanitize_address",
2751 DECL_ATTRIBUTES (current_function_decl));
namespace {

/* Descriptor for the main asan pass, run when optimizing.  */
const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  /* clone allows the pass to be scheduled more than once.  */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace
2785 gimple_opt_pass *
2786 make_pass_asan (gcc::context *ctxt)
2788 return new pass_asan (ctxt);
namespace {

/* Descriptor for the -O0 variant of the asan pass.  */
const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  /* Only gates on when the optimizing "asan" pass does not run.  */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace
2821 gimple_opt_pass *
2822 make_pass_asan_O0 (gcc::context *ctxt)
2824 return new pass_asan_O0 (ctxt);
2827 #include "gt-asan.h"