/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
		     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
		     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
		     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
		     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
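
/* For example, with the mapping above a 4-byte load of X is instrumented
   as a call to __tsan_read4 (&X) and an 8-byte store as __tsan_write8 (&X).
   The __tsan_* entry points themselves are provided by the ThreadSanitizer
   runtime library (libtsan), not by this file.  */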
/* Check whether EXPR refers to a store to vptr.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
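
/* Stores to the vptr are reported through the separate __tsan_vptr_update
   entry point (see instrument_expr below); this lets the runtime treat
   races on the vtable pointer during object construction and destruction
   specially rather than flagging them as ordinary data races.  */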
/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size == -1)
    return false;

  /* For now just avoid instrumenting bit-field accesses.
     TODO: handle bit-fields as if touching the whole field.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape,
     as they cannot escape to other threads.  */
  if (DECL_P (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      pt.nonlocal = 1;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!is_global_var (base) && !may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
	  && DECL_HARD_REGISTER (base)))
    return false;

  if (size == 0
      || bitpos % (size * BITS_PER_UNIT)
      || bitsize != size * BITS_PER_UNIT)
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);
  gcc_checking_assert (rhs != NULL || is_gimple_addressable (expr));
  expr_ptr = build_fold_addr_expr (unshare_expr (expr));
  seq = NULL;
  if (!is_gimple_val (expr_ptr))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expr_ptr), NULL),
			       expr_ptr);
      expr_ptr = gimple_assign_lhs (g);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt_without_update (&seq, g);
    }
  if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 1, expr_ptr);
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
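
/* In the common case the sequence built above is a single call such as
   __tsan_write4 (&EXPR) placed immediately before the instrumented
   statement; only writes of call results and potentially throwing calls
   need the after-the-statement or on-edge insertion handled above.  */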
/* Actions for sync/atomic builtin transformations.  */

enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table of how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */

static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
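
  /* Each TRANSFORM entry pairs a __sync_* or __atomic_* builtin with the
     __tsan_atomic* builtin that replaces it, together with the action that
     instrument_builtin_call below uses to fix up arguments and results.  */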
  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (!tree_fits_uhwi_p (last_arg)
		|| tree_to_uhwi (last_arg) > MEMMODEL_SEQ_CST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
		    g = gimple_build_assign_with_ops (NOP_EXPR, var,
						      args[1], NULL_TREE);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt,
				     make_ssa_name (TREE_TYPE (lhs), NULL));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
		    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
						      gimple_call_lhs (stmt),
						      args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
						      NULL_TREE);
		  }
		else
		  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
						    lhs,
						    gimple_call_lhs (stmt),
						    args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (!tree_fits_uhwi_p (args[4])
		|| tree_to_uhwi (args[4]) > MEMMODEL_SEQ_CST)
	      return;
	    if (!tree_fits_uhwi_p (args[5])
		|| tree_to_uhwi (args[5]) > MEMMODEL_SEQ_CST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t, NULL);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign_with_ops (NOP_EXPR,
						  make_ssa_name (TREE_TYPE (t),
								 NULL),
						  args[1], NULL_TREE);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
					 t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
						  args[1],
						  gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    return;
	  default:
	    continue;
	  }
      }
}
/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      if (is_gimple_builtin_call (stmt))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}
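
/* Note that any call other than the __tsan_init call forces function
   entry/exit instrumentation, whereas loads and stores force it only
   when instrument_expr actually instrumented them.  */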
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      fentry_exit_instrument |= instrument_gimple (&gsi);
  return fentry_exit_instrument;
}
/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  basic_block succ_bb;
  gimple_stmt_iterator gsi;
  tree ret_addr, builtin_decl;
  gimple g;

  succ_bb = single_succ (ENTRY_BLOCK_PTR);
  gsi = gsi_after_labels (succ_bb);

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node, NULL);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}
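
/* The value obtained from __builtin_return_address (0) is handed to
   __tsan_func_entry so the runtime can attribute events in this function
   to its call site when it builds report stack traces.  */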
/* Instruments function exits.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR;
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
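
/* Every predecessor of the exit block ends in a return (or the
   __builtin_return builtin), so placing __tsan_func_exit right before
   those statements keeps the entry and exit calls balanced on all
   normally returning paths.  */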
/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    {
      instrument_func_entry ();
      instrument_func_exit ();
    }
  return 0;
}

/* The pass's gate.  */

static bool
tsan_gate (void)
{
  return (flag_sanitize & SANITIZE_THREAD) != 0;
}
/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
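
/* The MAX_RESERVED_INIT_PRIORITY - 1 priority orders the generated
   constructor ahead of ordinary static constructors, so __tsan_init runs
   before user code that might already start threads.  */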
/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_all | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  bool gate () { return tsan_gate (); }
  unsigned int execute () { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

/* The pass's gate at -O0.  */

static bool
tsan_gate_O0 (void)
{
  return (flag_sanitize & SANITIZE_THREAD) != 0 && !optimize;
}
namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_all | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return tsan_gate_O0 (); }
  unsigned int execute () { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}