/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

24 #include "coretypes.h"
28 #include "double-int.h"
36 #include "fold-const.h"
39 #include "hard-reg-set.h"
43 #include "statistics.h"
45 #include "fixed-value.h"
46 #include "insn-config.h"
57 #include "dominance.h"
59 #include "basic-block.h"
60 #include "tree-ssa-alias.h"
61 #include "internal-fn.h"
62 #include "gimple-expr.h"
66 #include "gimple-iterator.h"
67 #include "gimplify-me.h"
68 #include "gimple-ssa.h"
70 #include "plugin-api.h"
74 #include "stringpool.h"
75 #include "tree-ssanames.h"
76 #include "tree-pass.h"
77 #include "tree-iterator.h"
78 #include "langhooks.h"
81 #include "diagnostic.h"
82 #include "tree-ssa-propagate.h"
83 #include "tree-ssa-loop-ivopts.h"
/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}

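/* For illustration (schematic, not part of the pass): with SIZE 4 and
   IS_WRITE true the decl returned above corresponds to the runtime
   entry point

     void __tsan_write4 (void *addr);

   Note that the <= comparisons would map a 3-byte access to the
   2-byte builtin, but instrument_expr below only reaches this
   function for power-of-two sizes up to 16 and routes everything
   else through __tsan_read_range/__tsan_write_range.  */
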
/* Check as to whether EXPR refers to a store to vptr.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

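/* For illustration (schematic, not part of the pass): the store this
   matches is typically the vtable-pointer initialization a C++
   constructor emits, roughly

     this->_vptr.C = &_ZTV1C + offset;

   i.e. a single assignment whose LHS is a COMPONENT_REF of a
   FIELD_DECL with DECL_VIRTUAL_P set.  The returned RHS is later fed
   to __tsan_vptr_update instead of a plain __tsan_writeN call.  */
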
/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape,
     they can't escape to other threads then.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

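/* For illustration (schematic, not part of the pass): given a plain
   4-byte store to an escaping global

     x = 42;

   the sequence built above amounts to

     __tsan_write4 (&x);
     x = 42;

   while a non-power-of-two-sized or under-aligned access is instead
   reported through __tsan_write_range (&x, size).  */
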
/* Actions for sync/atomic builtin transformations.  */

enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table how to map sync/atomic builtins to their corresponding
   tsan builtins.  */

static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};

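/* For illustration (schematic, not part of the table): a C11 load such
   as

     __atomic_load_4 (p, __ATOMIC_SEQ_CST)

   matches a check_last entry and is redirected to

     __tsan_atomic32_load (p, __ATOMIC_SEQ_CST)

   while the legacy __sync_fetch_and_add_4 (p, v) matches an
   add_seq_cst entry, which appends the missing memory-model argument:
   __tsan_atomic32_fetch_add (p, v, MEMMODEL_SEQ_CST).  */
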
/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || tree_to_uhwi (last_arg) > MEMMODEL_SEQ_CST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || tree_to_uhwi (args[4]) > MEMMODEL_SEQ_CST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || tree_to_uhwi (args[5]) > MEMMODEL_SEQ_CST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            gcc_unreachable ();
          }
      }
}

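/* For illustration (schematic, not part of the pass): tsan only
   provides fetch-then-op entry points, so the fetch_op path rewrites

     res = __atomic_add_fetch_4 (p, v, mo);

   into the equivalent

     tmp = __tsan_atomic32_fetch_add (p, v, mo);
     res = tmp + v;

   and NAND, whose table code is BIT_NOT_EXPR, is rebuilt as
   res = ~(tmp & v) by the special case above.  */
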
/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* All functions with function call will have exit instrumented,
         therefore no function calls other than __tsan_func_exit
         shall appear in the functions.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

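/* For illustration (schematic, not part of the pass): after this runs,
   every exit of an instrumented function ends with

     __tsan_func_exit ();
     return ...;

   pairing with the __tsan_func_entry call inserted by
   instrument_func_entry below, so the runtime can maintain per-thread
   shadow call stacks.  */
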
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

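/* For illustration (schematic, not part of the pass): the prologue
   built above behaves like

     __tsan_func_entry (__builtin_return_address (0));

   inserted on the single edge out of the entry block, so it executes
   exactly once per invocation, before any instrumented access.  */
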
/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}