* xcoffout.c (xcoff_tls_data_section_name): Define.
[official-gcc.git] / gcc / asan.c
blob26c4178a9f073d2a69a019d7fac60f39e5b75feb
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "gimple.h"
26 #include "tree-iterator.h"
27 #include "tree-flow.h"
28 #include "tree-pass.h"
29 #include "asan.h"
30 #include "gimple-pretty-print.h"
31 #include "target.h"
32 #include "expr.h"
33 #include "optabs.h"
34 #include "output.h"
35 #include "tm_p.h"
37 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
38 with <2x slowdown on average.
40 The tool consists of two parts:
41 instrumentation module (this file) and a run-time library.
42 The instrumentation module adds a run-time check before every memory insn.
43 For a 8- or 16- byte load accessing address X:
44 ShadowAddr = (X >> 3) + Offset
45 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
46 if (ShadowValue)
47 __asan_report_load8(X);
48 For a load of N bytes (N=1, 2 or 4) from address X:
49 ShadowAddr = (X >> 3) + Offset
50 ShadowValue = *(char*)ShadowAddr;
51 if (ShadowValue)
52 if ((X & 7) + N - 1 > ShadowValue)
53 __asan_report_loadN(X);
54 Stores are instrumented similarly, but using __asan_report_storeN functions.
 55    A call to __asan_init() is inserted into the list of module CTORs.
57 The run-time library redefines malloc (so that redzone are inserted around
58 the allocated memory) and free (so that reuse of free-ed memory is delayed),
59 provides __asan_report* and __asan_init functions.
61 Read more:
62 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
64 The current implementation supports detection of out-of-bounds and
65 use-after-free in the heap, on the stack and for global variables.
67 [Protection of stack variables]
69 To understand how detection of out-of-bounds and use-after-free works
70 for stack variables, lets look at this example on x86_64 where the
71 stack grows downward:
73 int
74 foo ()
76 char a[23] = {0};
77 int b[2] = {0};
79 a[5] = 1;
80 b[1] = 2;
82 return a[5] + b[1];
85 For this function, the stack protected by asan will be organized as
86 follows, from the top of the stack to the bottom:
88 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
90 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
91 the next slot be 32 bytes aligned; this one is called Partial
92 Redzone; this 32 bytes alignment is an asan constraint]
94 Slot 3/ [24 bytes for variable 'a']
96 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
98 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
100 Slot 6/ [8 bytes for variable 'b']
102 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
103 'LEFT RedZone']
105 The 32 bytes of LEFT red zone at the bottom of the stack can be
106 decomposed as such:
108 1/ The first 8 bytes contain a magical asan number that is always
109 0x41B58AB3.
111 2/ The following 8 bytes contains a pointer to a string (to be
112 parsed at runtime by the runtime asan library), which format is
113 the following:
115 "<function-name> <space> <num-of-variables-on-the-stack>
116 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
117 <length-of-var-in-bytes> ){n} "
119 where '(...){n}' means the content inside the parenthesis occurs 'n'
120 times, with 'n' being the number of variables on the stack.
122 3/ The following 16 bytes of the red zone have no particular
123 format.
125 The shadow memory for that stack layout is going to look like this:
127 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
128 The F1 byte pattern is a magic number called
129 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
130 the memory for that shadow byte is part of a the LEFT red zone
131 intended to seat at the bottom of the variables on the stack.
133 - content of shadow memory 8 bytes for slots 6 and 5:
134 0xF4F4F400. The F4 byte pattern is a magic number
135 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
136 memory region for this shadow byte is a PARTIAL red zone
137 intended to pad a variable A, so that the slot following
138 {A,padding} is 32 bytes aligned.
140 Note that the fact that the least significant byte of this
141 shadow memory content is 00 means that 8 bytes of its
142 corresponding memory (which corresponds to the memory of
143 variable 'b') is addressable.
145 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
146 The F2 byte pattern is a magic number called
147 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
148 region for this shadow byte is a MIDDLE red zone intended to
149 seat between two 32 aligned slots of {variable,padding}.
151 - content of shadow memory 8 bytes for slot 3 and 2:
 152      0xF4000000.  This represents the concatenation of
153 variable 'a' and the partial red zone following it, like what we
154 had for variable 'b'. The least significant 3 bytes being 00
155 means that the 3 bytes of variable 'a' are addressable.
157 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
158 The F3 byte pattern is a magic number called
159 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
160 region for this shadow byte is a RIGHT red zone intended to seat
161 at the top of the variables of the stack.
163 Note that the real variable layout is done in expand_used_vars in
164 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
165 stack variables as well as the different red zones, emits some
166 prologue code to populate the shadow memory as to poison (mark as
167 non-accessible) the regions of the red zones and mark the regions of
168 stack variables as accessible, and emit some epilogue code to
169 un-poison (mark as accessible) the regions of red zones right before
170 the function exits.
172 [Protection of global variables]
174 The basic idea is to insert a red zone between two global variables
175 and install a constructor function that calls the asan runtime to do
176 the populating of the relevant shadow memory regions at load time.
178 So the global variables are laid out as to insert a red zone between
179 them. The size of the red zones is so that each variable starts on a
180 32 bytes boundary.
182 Then a constructor function is installed so that, for each global
183 variable, it calls the runtime asan library function
184 __asan_register_globals_with an instance of this type:
186 struct __asan_global
188 // Address of the beginning of the global variable.
189 const void *__beg;
191 // Initial size of the global variable.
192 uptr __size;
194 // Size of the global variable + size of the red zone. This
195 // size is 32 bytes aligned.
196 uptr __size_with_redzone;
198 // Name of the global variable.
199 const void *__name;
201 // This is always set to NULL for now.
202 uptr __has_dynamic_init;
205 A destructor function that calls the runtime asan library function
206 _asan_unregister_globals is also installed. */
208 alias_set_type asan_shadow_set = -1;
210 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
211 alias set is used for all shadow memory accesses. */
212 static GTY(()) tree shadow_ptr_types[2];
214 /* Asan pretty-printer, used for buidling of the description STRING_CSTs. */
215 static pretty_printer asan_pp;
216 static bool asan_pp_initialized;
218 /* Initialize asan_pp. */
220 static void
221 asan_pp_initialize (void)
223 pp_construct (&asan_pp, /* prefix */NULL, /* line-width */0);
224 asan_pp_initialized = true;
227 /* Create ADDR_EXPR of STRING_CST with asan_pp text. */
229 static tree
230 asan_pp_string (void)
232 const char *buf = pp_base_formatted_text (&asan_pp);
233 size_t len = strlen (buf);
234 tree ret = build_string (len + 1, buf);
235 TREE_TYPE (ret)
236 = build_array_type (char_type_node, build_index_type (size_int (len)));
237 TREE_READONLY (ret) = 1;
238 TREE_STATIC (ret) = 1;
239 return build1 (ADDR_EXPR, build_pointer_type (char_type_node), ret);
242 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
244 static rtx
245 asan_shadow_cst (unsigned char shadow_bytes[4])
247 int i;
248 unsigned HOST_WIDE_INT val = 0;
249 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
250 for (i = 0; i < 4; i++)
251 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
252 << (BITS_PER_UNIT * i);
253 return GEN_INT (trunc_int_for_mode (val, SImode));
256 /* Insert code to protect stack vars. The prologue sequence should be emitted
257 directly, epilogue sequence returned. BASE is the register holding the
258 stack base, against which OFFSETS array offsets are relative to, OFFSETS
259 array contains pairs of offsets in reverse order, always the end offset
260 of some gap that needs protection followed by starting offset,
261 and DECLS is an array of representative decls for each var partition.
262 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
263 elements long (OFFSETS include gap before the first variable as well
264 as gaps after each stack variable). */
267 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
268 int length)
270 rtx shadow_base, shadow_mem, ret, mem;
271 unsigned char shadow_bytes[4];
272 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
273 HOST_WIDE_INT last_offset, last_size;
274 int l;
275 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
276 tree str_cst;
278 /* First of all, prepare the description string. */
279 if (!asan_pp_initialized)
280 asan_pp_initialize ();
282 pp_clear_output_area (&asan_pp);
283 if (DECL_NAME (current_function_decl))
284 pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
285 else
286 pp_string (&asan_pp, "<unknown>");
287 pp_space (&asan_pp);
288 pp_decimal_int (&asan_pp, length / 2 - 1);
289 pp_space (&asan_pp);
290 for (l = length - 2; l; l -= 2)
292 tree decl = decls[l / 2 - 1];
293 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
294 pp_space (&asan_pp);
295 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
296 pp_space (&asan_pp);
297 if (DECL_P (decl) && DECL_NAME (decl))
299 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
300 pp_space (&asan_pp);
301 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
303 else
304 pp_string (&asan_pp, "9 <unknown>");
305 pp_space (&asan_pp);
307 str_cst = asan_pp_string ();
309 /* Emit the prologue sequence. */
310 base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
311 NULL_RTX, 1, OPTAB_DIRECT);
312 mem = gen_rtx_MEM (ptr_mode, base);
313 emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
314 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
315 emit_move_insn (mem, expand_normal (str_cst));
316 shadow_base = expand_binop (Pmode, lshr_optab, base,
317 GEN_INT (ASAN_SHADOW_SHIFT),
318 NULL_RTX, 1, OPTAB_DIRECT);
319 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
320 GEN_INT (targetm.asan_shadow_offset ()),
321 NULL_RTX, 1, OPTAB_DIRECT);
322 gcc_assert (asan_shadow_set != -1
323 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
324 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
325 set_mem_alias_set (shadow_mem, asan_shadow_set);
326 prev_offset = base_offset;
327 for (l = length; l; l -= 2)
329 if (l == 2)
330 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
331 offset = offsets[l - 1];
332 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
334 int i;
335 HOST_WIDE_INT aoff
336 = base_offset + ((offset - base_offset)
337 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
338 shadow_mem = adjust_address (shadow_mem, VOIDmode,
339 (aoff - prev_offset)
340 >> ASAN_SHADOW_SHIFT);
341 prev_offset = aoff;
342 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
343 if (aoff < offset)
345 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
346 shadow_bytes[i] = 0;
347 else
348 shadow_bytes[i] = offset - aoff;
350 else
351 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
352 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
353 offset = aoff;
355 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
357 shadow_mem = adjust_address (shadow_mem, VOIDmode,
358 (offset - prev_offset)
359 >> ASAN_SHADOW_SHIFT);
360 prev_offset = offset;
361 memset (shadow_bytes, cur_shadow_byte, 4);
362 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
363 offset += ASAN_RED_ZONE_SIZE;
365 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
367 do_pending_stack_adjust ();
369 /* Construct epilogue sequence. */
370 start_sequence ();
372 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
373 set_mem_alias_set (shadow_mem, asan_shadow_set);
374 prev_offset = base_offset;
375 last_offset = base_offset;
376 last_size = 0;
377 for (l = length; l; l -= 2)
379 offset = base_offset + ((offsets[l - 1] - base_offset)
380 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
381 if (last_offset + last_size != offset)
383 shadow_mem = adjust_address (shadow_mem, VOIDmode,
384 (last_offset - prev_offset)
385 >> ASAN_SHADOW_SHIFT);
386 prev_offset = last_offset;
387 clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
388 BLOCK_OP_NORMAL);
389 last_offset = offset;
390 last_size = 0;
392 last_size += base_offset + ((offsets[l - 2] - base_offset)
393 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
394 - offset;
396 if (last_size)
398 shadow_mem = adjust_address (shadow_mem, VOIDmode,
399 (last_offset - prev_offset)
400 >> ASAN_SHADOW_SHIFT);
401 clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
402 BLOCK_OP_NORMAL);
405 do_pending_stack_adjust ();
407 ret = get_insns ();
408 end_sequence ();
409 return ret;
412 /* Return true if DECL, a global var, might be overridden and needs
413 therefore a local alias. */
415 static bool
416 asan_needs_local_alias (tree decl)
418 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
421 /* Return true if DECL is a VAR_DECL that should be protected
422 by Address Sanitizer, by appending a red zone with protected
423 shadow memory after it and aligning it to at least
424 ASAN_RED_ZONE_SIZE bytes. */
426 bool
427 asan_protect_global (tree decl)
429 rtx rtl, symbol;
430 section *sect;
432 if (TREE_CODE (decl) != VAR_DECL
433 /* TLS vars aren't statically protectable. */
434 || DECL_THREAD_LOCAL_P (decl)
435 /* Externs will be protected elsewhere. */
436 || DECL_EXTERNAL (decl)
437 || !TREE_ASM_WRITTEN (decl)
438 || !DECL_RTL_SET_P (decl)
439 /* Comdat vars pose an ABI problem, we can't know if
440 the var that is selected by the linker will have
441 padding or not. */
442 || DECL_ONE_ONLY (decl)
443 /* Similarly for common vars. People can use -fno-common. */
444 || DECL_COMMON (decl)
445 /* Don't protect if using user section, often vars placed
446 into user section from multiple TUs are then assumed
447 to be an array of such vars, putting padding in there
448 breaks this assumption. */
449 || (DECL_SECTION_NAME (decl) != NULL_TREE
450 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
451 || DECL_SIZE (decl) == 0
452 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
453 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
454 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
455 return false;
457 rtl = DECL_RTL (decl);
458 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
459 return false;
460 symbol = XEXP (rtl, 0);
462 if (CONSTANT_POOL_ADDRESS_P (symbol)
463 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
464 return false;
466 sect = get_variable_section (decl, false);
467 if (sect->common.flags & SECTION_COMMON)
468 return false;
470 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
471 return false;
473 #ifndef ASM_OUTPUT_DEF
474 if (asan_needs_local_alias (decl))
475 return false;
476 #endif
478 return true;
481 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
482 IS_STORE is either 1 (for a store) or 0 (for a load).
483 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
485 static tree
486 report_error_func (bool is_store, int size_in_bytes)
488 tree fn_type;
489 tree def;
490 char name[100];
492 sprintf (name, "__asan_report_%s%d",
493 is_store ? "store" : "load", size_in_bytes);
494 fn_type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
495 def = build_fn_decl (name, fn_type);
496 TREE_NOTHROW (def) = 1;
497 DECL_IGNORED_P (def) = 1;
498 TREE_THIS_VOLATILE (def) = 1; /* Attribute noreturn. Surprise! */
499 DECL_ATTRIBUTES (def) = tree_cons (get_identifier ("leaf"),
500 NULL, DECL_ATTRIBUTES (def));
501 return def;
504 /* Construct a function tree for __asan_init(). */
506 static tree
507 asan_init_func (void)
509 tree fn_type;
510 tree def;
512 fn_type = build_function_type_list (void_type_node, NULL_TREE);
513 def = build_fn_decl ("__asan_init", fn_type);
514 TREE_NOTHROW (def) = 1;
515 DECL_IGNORED_P (def) = 1;
516 return def;
520 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
521 #define PROB_ALWAYS (REG_BR_PROB_BASE)
523 /* Split the current basic block and create a condition statement
524 insertion point right before or after the statement pointed to by
525 ITER. Return an iterator to the point at which the caller might
526 safely insert the condition statement.
528 THEN_BLOCK must be set to the address of an uninitialized instance
529 of basic_block. The function will then set *THEN_BLOCK to the
530 'then block' of the condition statement to be inserted by the
531 caller.
533 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
534 block' of the condition statement to be inserted by the caller.
536 Note that *FALLTHROUGH_BLOCK is a new block that contains the
537 statements starting from *ITER, and *THEN_BLOCK is a new empty
538 block.
540 *ITER is adjusted to point to always point to the first statement
541 of the basic block * FALLTHROUGH_BLOCK. That statement is the
542 same as what ITER was pointing to prior to calling this function,
543 if BEFORE_P is true; otherwise, it is its following statement. */
545 static gimple_stmt_iterator
546 create_cond_insert_point (gimple_stmt_iterator *iter,
547 bool before_p,
548 bool then_more_likely_p,
549 basic_block *then_block,
550 basic_block *fallthrough_block)
552 gimple_stmt_iterator gsi = *iter;
554 if (!gsi_end_p (gsi) && before_p)
555 gsi_prev (&gsi);
557 basic_block cur_bb = gsi_bb (*iter);
559 edge e = split_block (cur_bb, gsi_stmt (gsi));
561 /* Get a hold on the 'condition block', the 'then block' and the
562 'else block'. */
563 basic_block cond_bb = e->src;
564 basic_block fallthru_bb = e->dest;
565 basic_block then_bb = create_empty_bb (cond_bb);
567 /* Set up the newly created 'then block'. */
568 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
569 int fallthrough_probability
570 = then_more_likely_p
571 ? PROB_VERY_UNLIKELY
572 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
573 e->probability = PROB_ALWAYS - fallthrough_probability;
574 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
576 /* Set up the fallthrough basic block. */
577 e = find_edge (cond_bb, fallthru_bb);
578 e->flags = EDGE_FALSE_VALUE;
579 e->count = cond_bb->count;
580 e->probability = fallthrough_probability;
582 /* Update dominance info for the newly created then_bb; note that
583 fallthru_bb's dominance info has already been updated by
584 split_bock. */
585 if (dom_info_available_p (CDI_DOMINATORS))
586 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
588 *then_block = then_bb;
589 *fallthrough_block = fallthru_bb;
590 *iter = gsi_start_bb (fallthru_bb);
592 return gsi_last_bb (cond_bb);
595 /* Insert an if condition followed by a 'then block' right before the
596 statement pointed to by ITER. The fallthrough block -- which is the
597 else block of the condition as well as the destination of the
598 outcoming edge of the 'then block' -- starts with the statement
599 pointed to by ITER.
601 COND is the condition of the if.
603 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
604 'then block' is higher than the probability of the edge to the
605 fallthrough block.
607 Upon completion of the function, *THEN_BB is set to the newly
608 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
609 fallthrough block.
611 *ITER is adjusted to still point to the same statement it was
612 pointing to initially. */
614 static void
615 insert_if_then_before_iter (gimple cond,
616 gimple_stmt_iterator *iter,
617 bool then_more_likely_p,
618 basic_block *then_bb,
619 basic_block *fallthrough_bb)
621 gimple_stmt_iterator cond_insert_point =
622 create_cond_insert_point (iter,
623 /*before_p=*/true,
624 then_more_likely_p,
625 then_bb,
626 fallthrough_bb);
627 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
630 /* Instrument the memory access instruction BASE. Insert new
631 statements before or after ITER.
633 Note that the memory access represented by BASE can be either an
634 SSA_NAME, or a non-SSA expression. LOCATION is the source code
635 location. IS_STORE is TRUE for a store, FALSE for a load.
636 BEFORE_P is TRUE for inserting the instrumentation code before
637 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
638 1, 2, 4, 8, 16.
640 If BEFORE_P is TRUE, *ITER is arranged to still point to the
641 statement it was pointing to prior to calling this function,
642 otherwise, it points to the statement logically following it. */
644 static void
645 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
646 bool before_p, bool is_store, int size_in_bytes)
648 gimple_stmt_iterator gsi;
649 basic_block then_bb, else_bb;
650 tree t, base_addr, shadow;
651 gimple g;
652 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
653 tree shadow_type = TREE_TYPE (shadow_ptr_type);
654 tree uintptr_type
655 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
656 tree base_ssa = base;
658 /* Get an iterator on the point where we can add the condition
659 statement for the instrumentation. */
660 gsi = create_cond_insert_point (iter, before_p,
661 /*then_more_likely_p=*/false,
662 &then_bb,
663 &else_bb);
665 base = unshare_expr (base);
667 /* BASE can already be an SSA_NAME; in that case, do not create a
668 new SSA_NAME for it. */
669 if (TREE_CODE (base) != SSA_NAME)
671 g = gimple_build_assign_with_ops (TREE_CODE (base),
672 make_ssa_name (TREE_TYPE (base), NULL),
673 base, NULL_TREE);
674 gimple_set_location (g, location);
675 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
676 base_ssa = gimple_assign_lhs (g);
679 g = gimple_build_assign_with_ops (NOP_EXPR,
680 make_ssa_name (uintptr_type, NULL),
681 base_ssa, NULL_TREE);
682 gimple_set_location (g, location);
683 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
684 base_addr = gimple_assign_lhs (g);
686 /* Build
687 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
689 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
690 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
691 make_ssa_name (uintptr_type, NULL),
692 base_addr, t);
693 gimple_set_location (g, location);
694 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
696 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
697 g = gimple_build_assign_with_ops (PLUS_EXPR,
698 make_ssa_name (uintptr_type, NULL),
699 gimple_assign_lhs (g), t);
700 gimple_set_location (g, location);
701 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
703 g = gimple_build_assign_with_ops (NOP_EXPR,
704 make_ssa_name (shadow_ptr_type, NULL),
705 gimple_assign_lhs (g), NULL_TREE);
706 gimple_set_location (g, location);
707 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
709 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
710 build_int_cst (shadow_ptr_type, 0));
711 g = gimple_build_assign_with_ops (MEM_REF,
712 make_ssa_name (shadow_type, NULL),
713 t, NULL_TREE);
714 gimple_set_location (g, location);
715 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
716 shadow = gimple_assign_lhs (g);
718 if (size_in_bytes < 8)
720 /* Slow path for 1, 2 and 4 byte accesses.
721 Test (shadow != 0)
722 & ((base_addr & 7) + (size_in_bytes - 1)) >= shadow). */
723 g = gimple_build_assign_with_ops (NE_EXPR,
724 make_ssa_name (boolean_type_node,
725 NULL),
726 shadow,
727 build_int_cst (shadow_type, 0));
728 gimple_set_location (g, location);
729 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
730 t = gimple_assign_lhs (g);
732 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
733 make_ssa_name (uintptr_type,
734 NULL),
735 base_addr,
736 build_int_cst (uintptr_type, 7));
737 gimple_set_location (g, location);
738 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
740 g = gimple_build_assign_with_ops (NOP_EXPR,
741 make_ssa_name (shadow_type,
742 NULL),
743 gimple_assign_lhs (g), NULL_TREE);
744 gimple_set_location (g, location);
745 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
747 if (size_in_bytes > 1)
749 g = gimple_build_assign_with_ops (PLUS_EXPR,
750 make_ssa_name (shadow_type,
751 NULL),
752 gimple_assign_lhs (g),
753 build_int_cst (shadow_type,
754 size_in_bytes - 1));
755 gimple_set_location (g, location);
756 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
759 g = gimple_build_assign_with_ops (GE_EXPR,
760 make_ssa_name (boolean_type_node,
761 NULL),
762 gimple_assign_lhs (g),
763 shadow);
764 gimple_set_location (g, location);
765 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
767 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
768 make_ssa_name (boolean_type_node,
769 NULL),
770 t, gimple_assign_lhs (g));
771 gimple_set_location (g, location);
772 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
773 t = gimple_assign_lhs (g);
775 else
776 t = shadow;
778 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
779 NULL_TREE, NULL_TREE);
780 gimple_set_location (g, location);
781 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
783 /* Generate call to the run-time library (e.g. __asan_report_load8). */
784 gsi = gsi_start_bb (then_bb);
785 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
786 1, base_addr);
787 gimple_set_location (g, location);
788 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
790 *iter = gsi_start_bb (else_bb);
793 /* If T represents a memory access, add instrumentation code before ITER.
794 LOCATION is source code location.
795 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
797 static void
798 instrument_derefs (gimple_stmt_iterator *iter, tree t,
799 location_t location, bool is_store)
801 tree type, base;
802 HOST_WIDE_INT size_in_bytes;
804 type = TREE_TYPE (t);
805 switch (TREE_CODE (t))
807 case ARRAY_REF:
808 case COMPONENT_REF:
809 case INDIRECT_REF:
810 case MEM_REF:
811 break;
812 default:
813 return;
816 size_in_bytes = int_size_in_bytes (type);
817 if ((size_in_bytes & (size_in_bytes - 1)) != 0
818 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
819 return;
821 /* For now just avoid instrumenting bit field acceses.
822 Fixing it is doable, but expected to be messy. */
824 HOST_WIDE_INT bitsize, bitpos;
825 tree offset;
826 enum machine_mode mode;
827 int volatilep = 0, unsignedp = 0;
828 get_inner_reference (t, &bitsize, &bitpos, &offset,
829 &mode, &unsignedp, &volatilep, false);
830 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
831 || bitsize != size_in_bytes * BITS_PER_UNIT)
832 return;
834 base = build_fold_addr_expr (t);
835 build_check_stmt (location, base, iter, /*before_p=*/true,
836 is_store, size_in_bytes);
839 /* Instrument an access to a contiguous memory region that starts at
840 the address pointed to by BASE, over a length of LEN (expressed in
841 the sizeof (*BASE) bytes). ITER points to the instruction before
842 which the instrumentation instructions must be inserted. LOCATION
843 is the source location that the instrumentation instructions must
844 have. If IS_STORE is true, then the memory access is a store;
845 otherwise, it's a load. */
847 static void
848 instrument_mem_region_access (tree base, tree len,
849 gimple_stmt_iterator *iter,
850 location_t location, bool is_store)
852 if (integer_zerop (len))
853 return;
855 gimple_stmt_iterator gsi = *iter;
857 basic_block fallthrough_bb = NULL, then_bb = NULL;
858 if (!is_gimple_constant (len))
860 /* So, the length of the memory area to asan-protect is
861 non-constant. Let's guard the generated instrumentation code
862 like:
864 if (len != 0)
866 //asan instrumentation code goes here.
868 // falltrough instructions, starting with *ITER. */
870 gimple g = gimple_build_cond (NE_EXPR,
871 len,
872 build_int_cst (TREE_TYPE (len), 0),
873 NULL_TREE, NULL_TREE);
874 gimple_set_location (g, location);
875 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
876 &then_bb, &fallthrough_bb);
877 /* Note that fallthrough_bb starts with the statement that was
878 pointed to by ITER. */
880 /* The 'then block' of the 'if (len != 0) condition is where
881 we'll generate the asan instrumentation code now. */
882 gsi = gsi_start_bb (then_bb);
885 /* Instrument the beginning of the memory region to be accessed,
886 and arrange for the rest of the intrumentation code to be
887 inserted in the then block *after* the current gsi. */
888 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
890 if (then_bb)
891 /* We are in the case where the length of the region is not
892 constant; so instrumentation code is being generated in the
893 'then block' of the 'if (len != 0) condition. Let's arrange
894 for the subsequent instrumentation statements to go in the
895 'then block'. */
896 gsi = gsi_last_bb (then_bb);
897 else
898 *iter = gsi;
900 /* We want to instrument the access at the end of the memory region,
901 which is at (base + len - 1). */
903 /* offset = len - 1; */
904 len = unshare_expr (len);
905 gimple offset =
906 gimple_build_assign_with_ops (TREE_CODE (len),
907 make_ssa_name (TREE_TYPE (len), NULL),
908 len, NULL);
909 gimple_set_location (offset, location);
910 gsi_insert_before (&gsi, offset, GSI_NEW_STMT);
912 offset =
913 gimple_build_assign_with_ops (MINUS_EXPR,
914 make_ssa_name (size_type_node, NULL),
915 gimple_assign_lhs (offset),
916 build_int_cst (size_type_node, 1));
917 gimple_set_location (offset, location);
918 gsi_insert_after (&gsi, offset, GSI_NEW_STMT);
920 /* _1 = base; */
921 base = unshare_expr (base);
922 gimple region_end =
923 gimple_build_assign_with_ops (TREE_CODE (base),
924 make_ssa_name (TREE_TYPE (base), NULL),
925 base, NULL);
926 gimple_set_location (region_end, location);
927 gsi_insert_after (&gsi, region_end, GSI_NEW_STMT);
929 /* _2 = _1 + offset; */
930 region_end =
931 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
932 make_ssa_name (TREE_TYPE (base), NULL),
933 gimple_assign_lhs (region_end),
934 gimple_assign_lhs (offset));
935 gimple_set_location (region_end, location);
936 gsi_insert_after (&gsi, region_end, GSI_NEW_STMT);
938 /* instrument access at _2; */
939 build_check_stmt (location, gimple_assign_lhs (region_end),
940 &gsi, /*before_p=*/false, is_store, 1);
943 /* Instrument the call (to the builtin strlen function) pointed to by
944 ITER.
946 This function instruments the access to the first byte of the
947 argument, right before the call. After the call it instruments the
948 access to the last byte of the argument; it uses the result of the
949 call to deduce the offset of that last byte.
 951    Upon completion, iff the call has actually been instrumented, this
952 function returns TRUE and *ITER points to the statement logically
953 following the built-in strlen function call *ITER was initially
954 pointing to. Otherwise, the function returns FALSE and *ITER
955 remains unchanged. */
957 static bool
958 instrument_strlen_call (gimple_stmt_iterator *iter)
960 gimple call = gsi_stmt (*iter);
961 gcc_assert (is_gimple_call (call));
963 tree callee = gimple_call_fndecl (call);
964 gcc_assert (is_builtin_fn (callee)
965 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
966 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
968 tree len = gimple_call_lhs (call);
969 if (len == NULL)
970 /* Some passes might clear the return value of the strlen call;
971 bail out in that case. Return FALSE as we are not advancing
972 *ITER. */
973 return false;
974 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
976 location_t loc = gimple_location (call);
977 tree str_arg = gimple_call_arg (call, 0);
979 /* Instrument the access to the first byte of str_arg. i.e:
981 _1 = str_arg; instrument (_1); */
982 gimple str_arg_ssa =
983 gimple_build_assign_with_ops (NOP_EXPR,
984 make_ssa_name (build_pointer_type
985 (char_type_node), NULL),
986 str_arg, NULL);
987 gimple_set_location (str_arg_ssa, loc);
988 gimple_stmt_iterator gsi = *iter;
989 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
990 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
991 /*before_p=*/false, /*is_store=*/false, 1);
993 /* If we initially had an instruction like:
995 int n = strlen (str)
997 we now want to instrument the access to str[n], after the
998 instruction above.*/
1000 /* So let's build the access to str[n] that is, access through the
1001 pointer_plus expr: (_1 + len). */
1002 gimple stmt =
1003 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1004 make_ssa_name (TREE_TYPE (str_arg),
1005 NULL),
1006 gimple_assign_lhs (str_arg_ssa),
1007 len);
1008 gimple_set_location (stmt, loc);
1009 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1011 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1012 /*before_p=*/false, /*is_store=*/false, 1);
1014 /* Ensure that iter points to the statement logically following the
1015 one it was initially pointing to. */
1016 *iter = gsi;
1017 /* As *ITER has been advanced to point to the next statement, let's
1018 return true to inform transform_statements that it shouldn't
1019 advance *ITER anymore; otherwises it will skip that next
1020 statement, which wouldn't be instrumented. */
1021 return true;
1024 /* Instrument the call to a built-in memory access function that is
1025 pointed to by the iterator ITER.
1027 Upon completion, return TRUE iff *ITER has been advanced to the
1028 statement following the one it was originally pointing to. */
1030 static bool
1031 instrument_builtin_call (gimple_stmt_iterator *iter)
1033 gimple call = gsi_stmt (*iter);
1035 gcc_assert (is_gimple_builtin_call (call));
1037 tree callee = gimple_call_fndecl (call);
1038 location_t loc = gimple_location (call);
1039 tree source0 = NULL_TREE, source1 = NULL_TREE,
1040 dest = NULL_TREE, len = NULL_TREE;
1041 bool is_store = true;
1043 switch (DECL_FUNCTION_CODE (callee))
1045 /* (s, s, n) style memops. */
1046 case BUILT_IN_BCMP:
1047 case BUILT_IN_MEMCMP:
1048 source0 = gimple_call_arg (call, 0);
1049 source1 = gimple_call_arg (call, 1);
1050 len = gimple_call_arg (call, 2);
1051 break;
1053 /* (src, dest, n) style memops. */
1054 case BUILT_IN_BCOPY:
1055 source0 = gimple_call_arg (call, 0);
1056 dest = gimple_call_arg (call, 1);
1057 len = gimple_call_arg (call, 2);
1058 break;
1060 /* (dest, src, n) style memops. */
1061 case BUILT_IN_MEMCPY:
1062 case BUILT_IN_MEMCPY_CHK:
1063 case BUILT_IN_MEMMOVE:
1064 case BUILT_IN_MEMMOVE_CHK:
1065 case BUILT_IN_MEMPCPY:
1066 case BUILT_IN_MEMPCPY_CHK:
1067 dest = gimple_call_arg (call, 0);
1068 source0 = gimple_call_arg (call, 1);
1069 len = gimple_call_arg (call, 2);
1070 break;
1072 /* (dest, n) style memops. */
1073 case BUILT_IN_BZERO:
1074 dest = gimple_call_arg (call, 0);
1075 len = gimple_call_arg (call, 1);
1076 break;
1078 /* (dest, x, n) style memops*/
1079 case BUILT_IN_MEMSET:
1080 case BUILT_IN_MEMSET_CHK:
1081 dest = gimple_call_arg (call, 0);
1082 len = gimple_call_arg (call, 2);
1083 break;
1085 case BUILT_IN_STRLEN:
1086 return instrument_strlen_call (iter);
1088 /* And now the __atomic* and __sync builtins.
1089 These are handled differently from the classical memory memory
1090 access builtins above. */
1092 case BUILT_IN_ATOMIC_LOAD:
1093 case BUILT_IN_ATOMIC_LOAD_1:
1094 case BUILT_IN_ATOMIC_LOAD_2:
1095 case BUILT_IN_ATOMIC_LOAD_4:
1096 case BUILT_IN_ATOMIC_LOAD_8:
1097 case BUILT_IN_ATOMIC_LOAD_16:
1098 is_store = false;
1099 /* fall through. */
1101 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
1102 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
1103 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
1104 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
1105 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
1107 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
1108 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
1109 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
1110 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
1111 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
1113 case BUILT_IN_SYNC_FETCH_AND_OR_1:
1114 case BUILT_IN_SYNC_FETCH_AND_OR_2:
1115 case BUILT_IN_SYNC_FETCH_AND_OR_4:
1116 case BUILT_IN_SYNC_FETCH_AND_OR_8:
1117 case BUILT_IN_SYNC_FETCH_AND_OR_16:
1119 case BUILT_IN_SYNC_FETCH_AND_AND_1:
1120 case BUILT_IN_SYNC_FETCH_AND_AND_2:
1121 case BUILT_IN_SYNC_FETCH_AND_AND_4:
1122 case BUILT_IN_SYNC_FETCH_AND_AND_8:
1123 case BUILT_IN_SYNC_FETCH_AND_AND_16:
1125 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
1126 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
1127 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
1128 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
1129 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1131 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
1132 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
1133 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
1134 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
1136 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
1137 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
1138 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
1139 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
1140 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1142 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
1143 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
1144 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
1145 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
1146 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1148 case BUILT_IN_SYNC_OR_AND_FETCH_1:
1149 case BUILT_IN_SYNC_OR_AND_FETCH_2:
1150 case BUILT_IN_SYNC_OR_AND_FETCH_4:
1151 case BUILT_IN_SYNC_OR_AND_FETCH_8:
1152 case BUILT_IN_SYNC_OR_AND_FETCH_16:
1154 case BUILT_IN_SYNC_AND_AND_FETCH_1:
1155 case BUILT_IN_SYNC_AND_AND_FETCH_2:
1156 case BUILT_IN_SYNC_AND_AND_FETCH_4:
1157 case BUILT_IN_SYNC_AND_AND_FETCH_8:
1158 case BUILT_IN_SYNC_AND_AND_FETCH_16:
1160 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
1161 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
1162 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
1163 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
1164 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1166 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
1167 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
1168 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
1169 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
1171 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1172 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1173 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1174 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1175 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1177 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
1178 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
1179 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
1180 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
1181 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1183 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
1184 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
1185 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
1186 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
1187 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1189 case BUILT_IN_SYNC_LOCK_RELEASE_1:
1190 case BUILT_IN_SYNC_LOCK_RELEASE_2:
1191 case BUILT_IN_SYNC_LOCK_RELEASE_4:
1192 case BUILT_IN_SYNC_LOCK_RELEASE_8:
1193 case BUILT_IN_SYNC_LOCK_RELEASE_16:
1195 case BUILT_IN_ATOMIC_TEST_AND_SET:
1196 case BUILT_IN_ATOMIC_CLEAR:
1197 case BUILT_IN_ATOMIC_EXCHANGE:
1198 case BUILT_IN_ATOMIC_EXCHANGE_1:
1199 case BUILT_IN_ATOMIC_EXCHANGE_2:
1200 case BUILT_IN_ATOMIC_EXCHANGE_4:
1201 case BUILT_IN_ATOMIC_EXCHANGE_8:
1202 case BUILT_IN_ATOMIC_EXCHANGE_16:
1204 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
1205 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1206 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1207 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1208 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1209 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1211 case BUILT_IN_ATOMIC_STORE:
1212 case BUILT_IN_ATOMIC_STORE_1:
1213 case BUILT_IN_ATOMIC_STORE_2:
1214 case BUILT_IN_ATOMIC_STORE_4:
1215 case BUILT_IN_ATOMIC_STORE_8:
1216 case BUILT_IN_ATOMIC_STORE_16:
1218 case BUILT_IN_ATOMIC_ADD_FETCH_1:
1219 case BUILT_IN_ATOMIC_ADD_FETCH_2:
1220 case BUILT_IN_ATOMIC_ADD_FETCH_4:
1221 case BUILT_IN_ATOMIC_ADD_FETCH_8:
1222 case BUILT_IN_ATOMIC_ADD_FETCH_16:
1224 case BUILT_IN_ATOMIC_SUB_FETCH_1:
1225 case BUILT_IN_ATOMIC_SUB_FETCH_2:
1226 case BUILT_IN_ATOMIC_SUB_FETCH_4:
1227 case BUILT_IN_ATOMIC_SUB_FETCH_8:
1228 case BUILT_IN_ATOMIC_SUB_FETCH_16:
1230 case BUILT_IN_ATOMIC_AND_FETCH_1:
1231 case BUILT_IN_ATOMIC_AND_FETCH_2:
1232 case BUILT_IN_ATOMIC_AND_FETCH_4:
1233 case BUILT_IN_ATOMIC_AND_FETCH_8:
1234 case BUILT_IN_ATOMIC_AND_FETCH_16:
1236 case BUILT_IN_ATOMIC_NAND_FETCH_1:
1237 case BUILT_IN_ATOMIC_NAND_FETCH_2:
1238 case BUILT_IN_ATOMIC_NAND_FETCH_4:
1239 case BUILT_IN_ATOMIC_NAND_FETCH_8:
1240 case BUILT_IN_ATOMIC_NAND_FETCH_16:
1242 case BUILT_IN_ATOMIC_XOR_FETCH_1:
1243 case BUILT_IN_ATOMIC_XOR_FETCH_2:
1244 case BUILT_IN_ATOMIC_XOR_FETCH_4:
1245 case BUILT_IN_ATOMIC_XOR_FETCH_8:
1246 case BUILT_IN_ATOMIC_XOR_FETCH_16:
1248 case BUILT_IN_ATOMIC_OR_FETCH_1:
1249 case BUILT_IN_ATOMIC_OR_FETCH_2:
1250 case BUILT_IN_ATOMIC_OR_FETCH_4:
1251 case BUILT_IN_ATOMIC_OR_FETCH_8:
1252 case BUILT_IN_ATOMIC_OR_FETCH_16:
1254 case BUILT_IN_ATOMIC_FETCH_ADD_1:
1255 case BUILT_IN_ATOMIC_FETCH_ADD_2:
1256 case BUILT_IN_ATOMIC_FETCH_ADD_4:
1257 case BUILT_IN_ATOMIC_FETCH_ADD_8:
1258 case BUILT_IN_ATOMIC_FETCH_ADD_16:
1260 case BUILT_IN_ATOMIC_FETCH_SUB_1:
1261 case BUILT_IN_ATOMIC_FETCH_SUB_2:
1262 case BUILT_IN_ATOMIC_FETCH_SUB_4:
1263 case BUILT_IN_ATOMIC_FETCH_SUB_8:
1264 case BUILT_IN_ATOMIC_FETCH_SUB_16:
1266 case BUILT_IN_ATOMIC_FETCH_AND_1:
1267 case BUILT_IN_ATOMIC_FETCH_AND_2:
1268 case BUILT_IN_ATOMIC_FETCH_AND_4:
1269 case BUILT_IN_ATOMIC_FETCH_AND_8:
1270 case BUILT_IN_ATOMIC_FETCH_AND_16:
1272 case BUILT_IN_ATOMIC_FETCH_NAND_1:
1273 case BUILT_IN_ATOMIC_FETCH_NAND_2:
1274 case BUILT_IN_ATOMIC_FETCH_NAND_4:
1275 case BUILT_IN_ATOMIC_FETCH_NAND_8:
1276 case BUILT_IN_ATOMIC_FETCH_NAND_16:
1278 case BUILT_IN_ATOMIC_FETCH_XOR_1:
1279 case BUILT_IN_ATOMIC_FETCH_XOR_2:
1280 case BUILT_IN_ATOMIC_FETCH_XOR_4:
1281 case BUILT_IN_ATOMIC_FETCH_XOR_8:
1282 case BUILT_IN_ATOMIC_FETCH_XOR_16:
1284 case BUILT_IN_ATOMIC_FETCH_OR_1:
1285 case BUILT_IN_ATOMIC_FETCH_OR_2:
1286 case BUILT_IN_ATOMIC_FETCH_OR_4:
1287 case BUILT_IN_ATOMIC_FETCH_OR_8:
1288 case BUILT_IN_ATOMIC_FETCH_OR_16:
1290 dest = gimple_call_arg (call, 0);
1291 /* So DEST represents the address of a memory location.
1292 instrument_derefs wants the memory location, so lets
1293 dereference the address DEST before handing it to
1294 instrument_derefs. */
1295 if (TREE_CODE (dest) == ADDR_EXPR)
1296 dest = TREE_OPERAND (dest, 0);
1297 else if (TREE_CODE (dest) == SSA_NAME)
1298 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
1299 dest, build_int_cst (TREE_TYPE (dest), 0));
1300 else
1301 gcc_unreachable ();
1303 instrument_derefs (iter, dest, loc, is_store);
1304 return false;
1307 default:
1308 /* The other builtins memory access are not instrumented in this
1309 function because they either don't have any length parameter,
1310 or their length parameter is just a limit. */
1311 break;
1314 if (len != NULL_TREE)
1316 if (source0 != NULL_TREE)
1317 instrument_mem_region_access (source0, len, iter,
1318 loc, /*is_store=*/false);
1319 if (source1 != NULL_TREE)
1320 instrument_mem_region_access (source1, len, iter,
1321 loc, /*is_store=*/false);
1322 else if (dest != NULL_TREE)
1323 instrument_mem_region_access (dest, len, iter,
1324 loc, /*is_store=*/true);
1326 *iter = gsi_for_stmt (call);
1327 return false;
1329 return false;
1332 /* Instrument the assignment statement ITER if it is subject to
1333 instrumentation. */
1335 static void
1336 instrument_assignment (gimple_stmt_iterator *iter)
1338 gimple s = gsi_stmt (*iter);
1340 gcc_assert (gimple_assign_single_p (s));
1342 instrument_derefs (iter, gimple_assign_lhs (s),
1343 gimple_location (s), true);
1344 instrument_derefs (iter, gimple_assign_rhs1 (s),
1345 gimple_location (s), false);
1348 /* Instrument the function call pointed to by the iterator ITER, if it
1349 is subject to instrumentation. At the moment, the only function
1350 calls that are instrumented are some built-in functions that access
1351 memory. Look at instrument_builtin_call to learn more.
1353 Upon completion return TRUE iff *ITER was advanced to the statement
1354 following the one it was originally pointing to. */
1356 static bool
1357 maybe_instrument_call (gimple_stmt_iterator *iter)
1359 if (is_gimple_builtin_call (gsi_stmt (*iter)))
1360 return instrument_builtin_call (iter);
1361 return false;
1364 /* asan: this looks too complex. Can this be done simpler? */
1365 /* Transform
1366 1) Memory references.
1367 2) BUILTIN_ALLOCA calls.
1370 static void
1371 transform_statements (void)
1373 basic_block bb;
1374 gimple_stmt_iterator i;
1375 int saved_last_basic_block = last_basic_block;
1377 FOR_EACH_BB (bb)
1379 if (bb->index >= saved_last_basic_block) continue;
1380 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1382 gimple s = gsi_stmt (i);
1384 if (gimple_assign_single_p (s))
1385 instrument_assignment (&i);
1386 else if (is_gimple_call (s))
1388 if (maybe_instrument_call (&i))
1389 /* Avoid gsi_next (&i), because maybe_instrument_call
1390 advanced the I iterator already. */
1391 continue;
1393 gsi_next (&i);
1398 /* Build
1399 struct __asan_global
1401 const void *__beg;
1402 uptr __size;
1403 uptr __size_with_redzone;
1404 const void *__name;
1405 uptr __has_dynamic_init;
1406 } type. */
1408 static tree
1409 asan_global_struct (void)
1411 static const char *field_names[5]
1412 = { "__beg", "__size", "__size_with_redzone",
1413 "__name", "__has_dynamic_init" };
1414 tree fields[5], ret;
1415 int i;
1417 ret = make_node (RECORD_TYPE);
1418 for (i = 0; i < 5; i++)
1420 fields[i]
1421 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1422 get_identifier (field_names[i]),
1423 (i == 0 || i == 3) ? const_ptr_type_node
1424 : build_nonstandard_integer_type (POINTER_SIZE, 1));
1425 DECL_CONTEXT (fields[i]) = ret;
1426 if (i)
1427 DECL_CHAIN (fields[i - 1]) = fields[i];
1429 TYPE_FIELDS (ret) = fields[0];
1430 TYPE_NAME (ret) = get_identifier ("__asan_global");
1431 layout_type (ret);
1432 return ret;
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* UPTR is the pointer-sized unsigned integer type, read back from
     the second field (__size) of the __asan_global record.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  if (!asan_pp_initialized)
    asan_pp_initialize ();

  /* Build the descriptive name string "<decl-name> (<input-file>)"
     used by the run-time library in error reports.  */
  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_left_paren (&asan_pp);
  pp_string (&asan_pp, main_input_filename);
  pp_right_paren (&asan_pp);
  str_cst = asan_pp_string ();

  /* When the global cannot be referenced directly (see
     asan_needs_local_alias), emit a file-local alias for it and
     describe that alias instead.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      /* Mirror the relevant flags of the aliased decl.  */
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  /* Fill the five __asan_global fields in declaration order:
     __beg, __size, __size_with_redzone, __name, __has_dynamic_init.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  /* __has_dynamic_init is always 0 here.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
/* Statement list accumulated for the module constructor built by
   asan_finish_file.  Needs to be GTY(()), because
   cgraph_build_static_cdtor may invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
/* Module-level instrumentation.
   - Insert __asan_init () into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  struct varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  /* The module constructor always calls __asan_init first.  */
  append_to_statement_list (build_call_expr (asan_init_func (), 0),
			    &asan_ctor_statements);
  /* Count the protected globals, to size the descriptor array.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (asan_protect_global (vnode->symbol.decl))
      ++gcount;
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor, decl;
      tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Emit a static array of __asan_global descriptors, one entry
	 per protected global (filled in by asan_add_global).  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (asan_protect_global (vnode->symbol.decl))
	  asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      /* Call __asan_register_globals (&array, gcount) from the
	 constructor ...  */
      type = build_function_type_list (void_type_node, ptr_type_node,
				       uptr, NULL_TREE);
      decl = build_fn_decl ("__asan_register_globals", type);
      TREE_NOTHROW (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      append_to_statement_list (build_call_expr (decl, 2,
						 build_fold_addr_expr (var),
						 build_int_cst (uptr, gcount)),
				&asan_ctor_statements);

      /* ... and __asan_unregister_globals from a matching
	 destructor.  */
      decl = build_fn_decl ("__asan_unregister_globals", type);
      TREE_NOTHROW (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      append_to_statement_list (build_call_expr (decl, 2,
						 build_fold_addr_expr (var),
						 build_int_cst (uptr, gcount)),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
				 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
1562 /* Initialize shadow_ptr_types array. */
1564 static void
1565 asan_init_shadow_ptr_types (void)
1567 asan_shadow_set = new_alias_set ();
1568 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
1569 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
1570 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
1571 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
1572 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
1573 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
1576 /* Instrument the current function. */
1578 static unsigned int
1579 asan_instrument (void)
1581 if (shadow_ptr_types[0] == NULL_TREE)
1582 asan_init_shadow_ptr_types ();
1583 transform_statements ();
1584 return 0;
1587 static bool
1588 gate_asan (void)
1590 return flag_asan != 0
1591 && !lookup_attribute ("no_address_safety_analysis",
1592 DECL_ATTRIBUTES (current_function_decl));
/* The main asan pass, run within the regular optimization
   pipeline.  */

struct gimple_opt_pass pass_asan =
{
 {
  GIMPLE_PASS,
  "asan",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_asan,				/* gate */
  asan_instrument,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa			/* todo_flags_finish */
 }
};
1616 static bool
1617 gate_asan_O0 (void)
1619 return !optimize && gate_asan ();
/* Variant of the asan pass scheduled for -O0 compilations, where the
   regular pass placement does not run.  */

struct gimple_opt_pass pass_asan_O0 =
{
 {
  GIMPLE_PASS,
  "asan0",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_asan_O0,				/* gate */
  asan_instrument,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa			/* todo_flags_finish */
 }
};
1643 #include "gt-asan.h"