/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2017 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
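
/* For illustration: the branches above bucket access sizes 1, 2-3,
   4-7, 8-15 and 16+ onto the 1-, 2-, 4-, 8- and 16-byte entry points,
   so a 4-byte load of a variable x is reported roughly as

     __tsan_read4 (&x);

   next to the original access (a sketch of the call emitted by
   instrument_expr below, not a literal GIMPLE dump).  */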

/* Check whether EXPR refers to a store to a vptr.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
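
/* For illustration (assuming the usual C++ lowering of constructors):
   a vtable-pointer initialization such as

     this->_vptr.C = &_ZTV1C + 16;

   is recognized here because the _vptr FIELD_DECL has DECL_VIRTUAL_P
   set; the stored value is then routed to __tsan_vptr_update rather
   than a plain __tsan_writeN call.  */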

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape;
     such decls cannot be reached from other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = least_bit_hwi (align);
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
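
/* Placement examples (sketches of the transformation, not literal
   GIMPLE dumps): a plain store to a global is preceded by its check,

     x = 1;        becomes      __tsan_write4 (&x);  x = 1;

   whereas a store of a call result is checked after the call,

     x = foo ();   becomes      x = foo ();  __tsan_write4 (&x);

   because foo itself may contain synchronization that must be
   observed first.  */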

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table describing how to map sync/atomic builtins to their
   corresponding tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
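
/* Illustration of the table-driven rewriting: a CHECK_LAST entry such
   as (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD) turns, roughly,

     x = __atomic_load_4 (p, __ATOMIC_ACQUIRE);
   into
     x = __tsan_atomic32_load (p, __ATOMIC_ACQUIRE);

   after checking that the trailing memory-model argument is valid,
   while ADD_SEQ_CST entries append an explicit __ATOMIC_SEQ_CST
   argument when rewriting the legacy __sync_* builtins (a sketch,
   not a literal dump).  */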

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            maybe_clean_eh_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
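
            /* Note, for illustration: the tsan runtime only provides
               the fetch-style NAND, so for __atomic_nand_fetch_4
               (p, v, m) the statements built above compute, in effect,

                 tmp = __tsan_atomic32_fetch_nand (p, v, m);
                 lhs = ~(tmp & v);

               (a sketch of the emitted statements, not a literal
               dump).  */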
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (tree_fits_uhwi_p (args[4])
                && memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (tree_fits_uhwi_p (args[5])
                && memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
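
            /* Sketch of the val_cas rewriting above (32-bit case shown
               for illustration):

                 __sync_val_compare_and_swap (p, e, d)

               becomes, in effect,

                 tmp = e;
                 ok = __tsan_atomic32_compare_exchange_strong
                        (p, &tmp, d, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
                 lhs = ok ? e : tmp;

               relying on the runtime storing the observed value back
               through &tmp on failure.  */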
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_clear:
          case bool_test_and_set:
            if (BOOL_TYPE_SIZE != 8)
              {
                decl = NULL_TREE;
                for (j = 1; j < 5; j++)
                  if (BOOL_TYPE_SIZE == (8 << j))
                    {
                      enum built_in_function tsan_fcode
                        = (enum built_in_function)
                          (tsan_atomic_table[i].tsan_fcode + j);
                      decl = builtin_decl_implicit (tsan_fcode);
                      break;
                    }
                if (decl == NULL_TREE)
                  return;
              }
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            if (tsan_atomic_table[i].action == bool_clear)
              {
                update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                    build_int_cst (t, 0), last_arg);
                maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
                return;
              }
            t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                t, last_arg);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            lhs = gimple_call_lhs (stmt);
            if (lhs == NULL_TREE)
              return;
            if (targetm.atomic_test_and_set_trueval != 1
                || !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (t)))
              {
                tree new_lhs = make_ssa_name (TREE_TYPE (t));
                gimple_call_set_lhs (stmt, new_lhs);
                if (targetm.atomic_test_and_set_trueval != 1)
                  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
                                           build_int_cst (TREE_TYPE (t), 0));
                else
                  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                update_stmt (stmt);
              }
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Every function that contains a call will have its exit
         instrumented, so a call must not remain a tail call; the only
         call allowed to appear last in a function is
         __tsan_func_exit.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
            {
              if (fentry_exit_instrument)
                replace_func_exit (stmt);
              else
                tsan_func_exits.safe_push (stmt);
              func_exit_seen = true;
            }
          else
            fentry_exit_instrument |= instrument_gimple (&gsi);
        }
      if (gimple_purge_dead_eh_edges (bb))
        *cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
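
/* The sequence built above amounts to (a sketch of the emitted
   GIMPLE):

     ret_addr_N = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_N);

   inserted on the single edge leaving the entry block, so it executes
   before any instrumented access in the function.  */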

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
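
/* In effect each instrumented translation unit gets a high-priority
   static constructor that amounts to (a sketch):

     static void tsan_ctor (void) { __tsan_init (); }

   registered via cgraph_build_static_cdtor, so the runtime is
   initialized before user constructors can start threads.  */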

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}