/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2018 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Number of instrumented memory accesses in the current function.  */
/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
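
/* A note on the bucketing above: instrument_expr only calls this for
   accesses whose size is a power of two and at most 16 bytes (everything
   else is diverted to the __tsan_read_range/__tsan_write_range builtins),
   so the "size <= 3" etc. tests effectively select the exact-size entry
   point, e.g. a 4-byte read is reported via __tsan_read4 (&x).  */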
/* If EXPR is a store to a vptr, return the stored value; otherwise
   return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
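
/* Example: for a C++ vtable-pointer assignment "this->_vptr.C = &vt",
   the returned RHS lets instrument_expr emit
   __tsan_vptr_update (&this->_vptr.C, &vt) instead of a plain
   __tsan_writeN, so the runtime can treat vptr updates specially
   (e.g. benign races on the vptr during construction/destruction)
   rather than reporting them as ordinary data races.  */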
/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
                              &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape:
     they cannot be reached from other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }
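
  /* A minimal sketch of what the check above filters out:

       void f (void) { int local = 0; ++local; }

     LOCAL's address is never taken and it does not escape, so no other
     thread can access it and neither the read nor the write needs to be
     instrumented.  */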
  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = least_bit_hwi (align);
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
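
  /* Past this point EXPR_PTR addresses the bytes covering the access.
     For a bit-field like "struct S { int a : 3; } s; ... s.a = 1;" the
     branch above widens the access to the containing byte(s) and forms
     the address as a char-typed MEM_REF off the underlying object,
     because the bit-field itself is not addressable.  */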
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
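
  /* Dispatch summary: accesses with a non-power-of-two size, larger than
     16 bytes, or insufficiently aligned are reported through
     __tsan_read_range/__tsan_write_range (addr, size); vptr stores go
     through __tsan_vptr_update; everything else uses a sized entry
     point, e.g. "x = 1;" for a 4-byte int X becomes __tsan_write4 (&x).  */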
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};
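
/* For example, __atomic_load_4 only needs its memory-model argument
   validated (check_last); __sync_fetch_and_add_4 needs an explicit
   MEMMODEL_SEQ_CST argument appended (add_seq_cst); and
   __atomic_add_fetch_4 must additionally have its result recomputed
   from the fetched value (fetch_op), since the table below maps the
   op-then-fetch builtins onto the fetch-then-op tsan entry points.  */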
/* Table describing how to map sync/atomic builtins to their
   corresponding tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)
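
/* Each entry records the builtin pair plus the action; for instance,
   CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD) expands to
   { BUILT_IN_ATOMIC_LOAD_1, BUILT_IN_TSAN_ATOMIC8_LOAD,
     check_last, ERROR_MARK }.  */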
  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            maybe_clean_eh_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
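            /* For fetch_op the result must be recomputed, e.g.
               res = __atomic_add_fetch_4 (p, v, m) is rewritten as
               tmp = __tsan_atomic32_fetch_add (p, v, m); res = tmp + v;
               see adjust_result below.  */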
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (tree_fits_uhwi_p (args[4])
                && memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (tree_fits_uhwi_p (args[5])
                && memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
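            /* The weak/strong flag (args[3]) is dropped here: the tsan
               runtime has separate weak and strong entry points, so only
               the address, expected, desired and the two memory-model
               arguments are forwarded.  */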
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
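            /* __sync_val_compare_and_swap returns the old value, while the
               tsan builtin returns a success flag and stores the old value
               through its "expected" argument; the code above therefore
               rebuilds the result for val_cas as
               lhs = success ? expected : reloaded temporary.  */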
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
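            /* E.g. __sync_lock_release (p) on a 1-byte location becomes
               __tsan_atomic8_store (p, 0, MEMMODEL_RELEASE).  */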
          case bool_clear:
          case bool_test_and_set:
            if (BOOL_TYPE_SIZE != 8)
              {
                decl = NULL_TREE;
                for (j = 1; j < 5; j++)
                  if (BOOL_TYPE_SIZE == (8 << j))
                    {
                      enum built_in_function tsan_fcode
                        = (enum built_in_function)
                          (tsan_atomic_table[i].tsan_fcode + j);
                      decl = builtin_decl_implicit (tsan_fcode);
                      break;
                    }
                if (decl == NULL_TREE)
                  return;
              }
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            if (tsan_atomic_table[i].action == bool_clear)
              {
                update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                    build_int_cst (t, 0), last_arg);
                maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
                return;
              }
            t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                t, last_arg);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            lhs = gimple_call_lhs (stmt);
            if (lhs == NULL_TREE)
              return;
            if (targetm.atomic_test_and_set_trueval != 1
                || !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (t)))
              {
                tree new_lhs = make_ssa_name (TREE_TYPE (t));
                gimple_call_set_lhs (stmt, new_lhs);
                if (targetm.atomic_test_and_set_trueval != 1)
                  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
                                           build_int_cst (TREE_TYPE (t), 0));
                else
                  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                update_stmt (stmt);
              }
            return;
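            /* The "tsan_fcode + j" scaling above relies on the
               TSAN_ATOMIC{8,16,32,64,128}_* builtins being declared
               consecutively, so the variant matching BOOL_TYPE_SIZE can
               be picked by offset.  */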
          default:
            continue;
          }
      }
}
/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will get its exit
         instrumented, so no call other than __tsan_func_exit may
         remain a tail call; clear the tail-call flag.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}
/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}
/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
            {
              if (fentry_exit_instrument)
                replace_func_exit (stmt);
              else
                tsan_func_exits.safe_push (stmt);
              func_exit_seen = true;
            }
          else
            fentry_exit_instrument |= instrument_gimple (&gsi);
        }
      if (gimple_purge_dead_eh_edges (bb))
        *cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}
/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
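
/* The emitted prologue is effectively

     ret_addr_N = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_N);

   placed on the single edge out of the entry block, giving the runtime
   the caller's return address for its per-thread function stacks.  */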
/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
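
/* The constructor priority MAX_RESERVED_INIT_PRIORITY - 1 makes
   __tsan_init run ahead of ordinary (default-priority) constructors,
   so the runtime is initialized before instrumented code executes.  */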
/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}
namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}