gcc/tsan.c
/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2016 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"
/* Number of instrumented memory accesses in the current function.  */

/* Returns the decl of the corresponding builtin:
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
		     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
		     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
		     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
		     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
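/* Note: as called from instrument_expr below, SIZE should always be a
   power of two no larger than 16 (other sizes take the
   __tsan_read_range/__tsan_write_range path), so each bucket above is
   in practice hit at exactly 1, 2, 4, 8 or 16 bytes.  */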
/* Check whether EXPR refers to a store to a vptr.  If so, return the
   stored value (the RHS of STMT); otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
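/* For example, when a C++ constructor stores the vtable pointer,
   roughly "this->_vptr.C = &vtable", the COMPONENT_REF on the LHS
   names the virtual FIELD_DECL and the stored value is returned, so
   instrument_expr can emit __tsan_vptr_update instead of a plain
   __tsan_writeN.  */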
/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape,
     since they cannot be visible to other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
	  && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = align & -align;
	}
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
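/* As a sketch of the output: for an aligned 4-byte load "tmp = x" the
   sequence inserted before the statement is roughly

     _1 = &x;
     __tsan_read4 (_1);

   whereas an access whose size is not a power of two (or is
   insufficiently aligned) is reported with
   __tsan_read_range/__tsan_write_range (&x, size) instead.  */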
/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table of how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
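/* For instance, the CHECK_LAST entry for ATOMIC_FETCH_ADD_4 turns
   __atomic_fetch_add (p, v, model) into
   __tsan_atomic32_fetch_add (p, v, model) once the memory model
   argument is validated, while the FETCH_OP entry for
   ATOMIC_ADD_FETCH_4 uses the same tsan builtin and then re-applies
   PLUS_EXPR to recover the add-then-fetch result (see
   instrument_builtin_call below).  */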
/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (!tree_fits_uhwi_p (last_arg)
		|| memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (!tree_fits_uhwi_p (args[4])
		|| memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (!tree_fits_uhwi_p (args[5])
		|| memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
					 gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    return;
	  default:
	    continue;
	  }
      }
}
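/* E.g. for the fetch_op action, "lhs = __atomic_add_fetch (p, v, m)"
   is rewritten to roughly

     tmp = __tsan_atomic32_fetch_add (p, v, m);
     lhs = tmp + v;

   and for NAND (BIT_NOT_EXPR in the table) the result is recomputed
   as lhs = ~(tmp & v), since GIMPLE has no single tree code for
   NAND.  */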
/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
	 instrumented, so no call may remain a tail call:
	 otherwise __tsan_func_exit could not run after it.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}
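/* Note that GIMPLE clobbers are deliberately skipped above: they only
   mark the end of a variable's lifetime and perform no real memory
   access, so instrumenting them could produce spurious reports.  */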
/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}
/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
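/* This fallback walks every predecessor edge of the exit block, so a
   function with several returns gets one __tsan_func_exit () call
   inserted immediately before each GIMPLE_RETURN.  */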
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (is_gimple_call (stmt)
	    && gimple_call_internal_p (stmt)
	    && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
	  {
	    if (fentry_exit_instrument)
	      replace_func_exit (stmt);
	    else
	      tsan_func_exits.safe_push (stmt);
	    func_exit_seen = true;
	  }
	else
	  fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}
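/* The deferred handling of IFN_TSAN_FUNC_EXIT markers above means
   that when nothing in the function needed instrumentation the
   markers are simply removed, and otherwise each one is replaced by a
   real call to __tsan_func_exit ().  */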
/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
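/* The inserted prologue is roughly

     _1 = __builtin_return_address (0);
     __tsan_func_entry (_1);

   placed on the single edge leaving the entry block, so it dominates
   the whole function body.  */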
/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}
/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
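/* The generated constructor is roughly equivalent to compiling

     __attribute__ ((constructor))
     static void
     tsan_ctor (void)
     {
       __tsan_init ();
     }

   except that its priority sits just below the last reserved one, so
   the tsan runtime is initialized before user constructors run.  */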
/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
	      && !lookup_attribute ("no_sanitize_thread",
				    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}
namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
	      && !lookup_attribute ("no_sanitize_thread",
				    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}