/* gcc/tsan.c  */
/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "intl.h"
#include "tm.h"
#include "basic-block.h"
#include "gimple.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "options.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tsan.h"
#include "asan.h"
/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */
static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
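/* Illustrative note (added, not original commentary): a plain aligned
   4-byte store such as "g = 1" with "int g" is thus reported through
   __tsan_write4 (&g), while an odd size such as 3 falls into the next
   smaller bucket, so only part of such an access is tracked.  */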
/* Check whether EXPR refers to a store to the vptr.  */
static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
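/* Illustrative note (added): this matches the vtable-pointer store a
   C++ constructor or destructor emits, e.g. "this->_vptr.C = &_ZTV1C",
   so the store can be routed to __tsan_vptr_update, letting the
   runtime avoid false races on the vptr during object destruction.  */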
/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size == -1)
    return false;
  /* For now just avoid instrumenting bit-field accesses.
     TODO: handle bit-fields as if touching the whole field.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);
  /* No need to instrument accesses to decls that don't escape,
     they can't escape to other threads then.  */
  if (DECL_P (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      pt.nonlocal = 1;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!is_global_var (base) && !may_be_aliased (base))
        return false;
    }
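  /* Read-only objects can't be written and hard-register variables
     aren't memory, so neither can participate in a reportable race;
     both are skipped below.  */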
  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;
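  /* Only accesses covering a whole, naturally aligned object are
     instrumented; anything else (e.g. a bit-field component) is
     rejected here, matching the TODO above.  */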
  if (size == 0
      || bitpos % (size * BITS_PER_UNIT)
      || bitsize != size * BITS_PER_UNIT)
    return false;
  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);
  gcc_checking_assert (rhs != NULL || is_gimple_addressable (expr));
  expr_ptr = build_fold_addr_expr (unshare_expr (expr));
  seq = NULL;
  if (!is_gimple_val (expr_ptr))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expr_ptr), NULL),
                               expr_ptr);
      expr_ptr = gimple_assign_lhs (g);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt_without_update (&seq, g);
    }
  if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 1, expr_ptr);
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
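/* Illustrative result (added): for "int g;", the store "g = 1;" is
   instrumented roughly as

     __tsan_write4 (&g);
     g = 1;

   whereas for "g = foo ();" the __tsan_write4 call is placed after
   the call to foo, since foo itself may synchronize.  */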
/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};
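/* Summary of the actions (added for exposition): check_last only
   validates the trailing memory-model argument and swaps the fndecl;
   add_seq_cst/add_acquire append an explicit memory model; weak_cas,
   strong_cas, bool_cas and val_cas rewrite compare-and-swap argument
   lists; lock_release becomes an atomic release store; fetch_op and
   fetch_op_seq_cst map op-and-fetch builtins onto fetch-and-op calls
   plus a compensating statement.  */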
/* Table mapping sync/atomic builtins to their corresponding tsan
   equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
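/* Example of the mapping (added for illustration): with ThreadSanitizer
   enabled,

     __atomic_fetch_add_4 (p, v, __ATOMIC_ACQUIRE)

   is redirected to

     __tsan_atomic32_fetch_add (p, v, __ATOMIC_ACQUIRE)

   while the legacy __sync_fetch_and_add_4 (p, v) reaches the same tsan
   entry point with __ATOMIC_SEQ_CST appended.  */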
/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!host_integerp (last_arg, 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
                   > MEMMODEL_SEQ_CST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
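            /* For fetch_op (an op-and-fetch builtin mapped onto a
               fetch-and-op runtime call) the result still has to be
               recomputed; e.g. (illustration)
                 x = __atomic_add_fetch_4 (p, v, mo);
               becomes
                 tmp = __tsan_atomic32_fetch_add (p, v, mo);
                 x = tmp + v;
               handled by the adjust_result code below.  */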
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (NOP_EXPR, var,
                                                      args[1], NULL_TREE);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt,
                                     make_ssa_name (TREE_TYPE (lhs), NULL));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
                                                      gimple_call_lhs (stmt),
                                                      args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
                                                      NULL_TREE);
                  }
                else
                  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
                                                    lhs,
                                                    gimple_call_lhs (stmt),
                                                    args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!host_integerp (args[4], 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
                   > MEMMODEL_SEQ_CST)
              return;
            if (!host_integerp (args[5], 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
                   > MEMMODEL_SEQ_CST)
              return;
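            /* The compile-time "weak" flag (ARGS[3]) is dropped here:
               the tsan entry points take (addr, expected, desired,
               success_memmodel, failure_memmodel).  */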
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t, NULL);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign_with_ops (NOP_EXPR,
                                                  make_ssa_name (TREE_TYPE (t),
                                                                 NULL),
                                                  args[1], NULL_TREE);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
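            /* For __sync_val_compare_and_swap the old value has to be
               reconstructed from the bool-returning CAS: on success it
               equals ARGS[1], on failure the runtime stored the value
               it observed into T, so LHS is selected with a COND_EXPR.  */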
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
                                         t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
                                                  args[1],
                                                  gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
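          /* E.g. (illustration) __sync_lock_release (&l) becomes
             __tsan_atomic8_store (&l, 0, __ATOMIC_RELEASE).  */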
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}
/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      if (is_gimple_builtin_call (stmt))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      fentry_exit_instrument |= instrument_gimple (&gsi);
  return fentry_exit_instrument;
}
/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  basic_block succ_bb;
  gimple_stmt_iterator gsi;
  tree ret_addr, builtin_decl;
  gimple g;

  succ_bb = single_succ (ENTRY_BLOCK_PTR);
  gsi = gsi_after_labels (succ_bb);

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node, NULL);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}
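/* The emitted prologue is effectively (illustration, not original
   commentary):

     __tsan_func_entry (__builtin_return_address (0));

   which lets the runtime reconstruct call stacks for reports.  */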
/* Instruments function exits.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR;
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
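/* Each return thus becomes (illustration):

     __tsan_func_exit ();
     return ...;

   pairing with the __tsan_func_entry call inserted above.  */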
/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    {
      instrument_func_entry ();
      instrument_func_exit ();
    }
  return 0;
}

/* The pass's gate.  */

static bool
tsan_gate (void)
{
  return flag_tsan != 0;
}
/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
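/* Conceptually this emits (illustration, priorities simplified):

     static void __attribute__((constructor))
     tsan_ctor (void) { __tsan_init (); }

   using a reserved priority so the runtime initializes before user
   static constructors run.  */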
/* The pass descriptor.  */

struct gimple_opt_pass pass_tsan =
{
 {
  GIMPLE_PASS,
  "tsan",				/* name  */
  OPTGROUP_NONE,			/* optinfo_flags */
  tsan_gate,				/* gate  */
  tsan_pass,				/* execute  */
  NULL,					/* sub  */
  NULL,					/* next  */
  0,					/* static_pass_number  */
  TV_NONE,				/* tv_id  */
  PROP_ssa | PROP_cfg,			/* properties_required  */
  0,					/* properties_provided  */
  0,					/* properties_destroyed  */
  0,					/* todo_flags_start  */
  TODO_verify_all | TODO_update_ssa	/* todo_flags_finish  */
 }
};

static bool
tsan_gate_O0 (void)
{
  return flag_tsan != 0 && !optimize;
}
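/* Note (added for exposition): the pass is registered twice; pass_tsan
   runs from the optimizing pipeline, while pass_tsan_O0, gated on
   !optimize, ensures functions compiled at -O0 are instrumented as
   well.  */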
struct gimple_opt_pass pass_tsan_O0 =
{
 {
  GIMPLE_PASS,
  "tsan0",				/* name  */
  OPTGROUP_NONE,			/* optinfo_flags */
  tsan_gate_O0,				/* gate  */
  tsan_pass,				/* execute  */
  NULL,					/* sub  */
  NULL,					/* next  */
  0,					/* static_pass_number  */
  TV_NONE,				/* tv_id  */
  PROP_ssa | PROP_cfg,			/* properties_required  */
  0,					/* properties_provided  */
  0,					/* properties_destroyed  */
  0,					/* todo_flags_start  */
  TODO_verify_all | TODO_update_ssa	/* todo_flags_finish  */
 }
};