/* Thread Safety Annotations and Analysis.
   Copyright (C) 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Le-Chun Wu <lcwu@google.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains an analysis pass that uses the thread safety attributes
   to identify and warn about potential issues that could result in data
   races and deadlocks.  The thread safety attributes currently support only
   lock-based synchronization.  They help developers document the locks that
   need to be held to safely read and write shared variables, and also the
   order in which they intend to acquire locks.  Here is the list of the
   attributes that this analysis pass uses:

     __attribute__ ((lockable))
     __attribute__ ((scoped_lockable))
     __attribute__ ((guarded_by(x)))
     __attribute__ ((guarded))
     __attribute__ ((point_to_guarded_by(x)))
     __attribute__ ((point_to_guarded))
     __attribute__ ((acquired_after(__VA_ARGS__)))
     __attribute__ ((acquired_before(__VA_ARGS__)))
     __attribute__ ((exclusive_lock(__VA_ARGS__)))
     __attribute__ ((shared_lock(__VA_ARGS__)))
     __attribute__ ((exclusive_trylock(__VA_ARGS__)))
     __attribute__ ((shared_trylock(__VA_ARGS__)))
     __attribute__ ((unlock(__VA_ARGS__)))
     __attribute__ ((exclusive_locks_required(__VA_ARGS__)))
     __attribute__ ((shared_locks_required(__VA_ARGS__)))
     __attribute__ ((locks_excluded(__VA_ARGS__)))
     __attribute__ ((lock_returned(x)))
     __attribute__ ((no_thread_safety_analysis))
     __attribute__ ((ignore_reads_begin))
     __attribute__ ((ignore_reads_end))
     __attribute__ ((ignore_writes_begin))
     __attribute__ ((ignore_writes_end))
     __attribute__ ((unprotected_read))

   If multi-threaded code is annotated with these attributes, this analysis
   pass can detect the following potential thread safety issues:

     * Accesses to shared variables and function calls that are not guarded
       by proper (read or write) locks
     * Locks that are not acquired in the specified order
     * A cycle in the lock acquisition order
     * Trying to acquire a lock that is already held by the same thread
       - Useful when locks are non-reentrant
     * Locks that are not acquired and released in the same routine (or in
       control-equivalent blocks)
       - Having critical sections start and end in the same routine
         is a better practice

   The analysis pass uses a single-pass (i.e. single-iteration) data-flow
   analysis to maintain live lock sets at each program point, using the
   attributes to decide when to add locks to the live sets and when to
   remove them from the sets.  With the live lock sets and the attributes
   attached to shared variables and functions, we are able to check whether
   the variables and functions are well protected.  Note that the reason why
   we don't need iterative data-flow analysis is that critical sections
   spanning back edges are considered a bad practice.  */
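/* As an informal illustration (a hedged sketch, not part of the pass), the
   following shows how a small C translation unit might use the attributes
   listed above; the Mutex type and the lock/unlock wrapper functions are
   assumed here, not provided by GCC:

     struct __attribute__ ((lockable)) Mutex { int unused; };

     struct Mutex mu1;
     struct Mutex mu2 __attribute__ ((acquired_after (mu1)));

     int counter __attribute__ ((guarded_by (mu1)));

     void lock_mu1 (void) __attribute__ ((exclusive_lock (mu1)));
     void unlock_mu1 (void) __attribute__ ((unlock (mu1)));

     // Callers must already hold mu1 when calling bump().
     void bump (void) __attribute__ ((exclusive_locks_required (mu1)));

   With -Wthread-safety enabled, this pass is expected to flag accesses to
   "counter" made without holding mu1, calls to bump() without mu1 held, and
   acquisition orders that contradict the acquired_after annotation.  */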
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "c-family/c-common.h"
#include "toplev.h"
#include "input.h"
#include "diagnostic.h"
#include "intl.h"
#include "basic-block.h"
#include "timevar.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "tree-pretty-print.h"
#include "tree-threadsafe-analyze.h"
#include "tree-ssa-propagate.h"
/* A per-BB data structure used for topological traversal and data flow
   analysis.  */
struct bb_threadsafe_info
{
  /* Indicating whether the BB has been visited in the analysis.  */
  bool visited;

  /* Flags indicating whether we should ignore reads or writes in the
     analysis.  The flags are set and unset during the analysis when seeing
     function calls annotated with ignore_{reads|writes}_{begin|end}
     attributes.  */
  bool reads_ignored;
  bool writes_ignored;

  /* Number of predecessors visited.  Used for topological traversal of
     BBs.  */
  unsigned int n_preds_visited;

  /* Live out exclusive/shared lock sets.  */
  struct pointer_set_t *liveout_exclusive_locks;
  struct pointer_set_t *liveout_shared_locks;

  /* Locks released by the Release routine of a scoped lock (e.g.
     std::unique_lock::Release()).  When a lock is released by such routines
     on certain control-flow paths but not all, we consider it weakly
     released and keep track of it in this set so that later, when we
     encounter the destructor of the scoped lock (which is also an UNLOCK
     function), we will not emit a bogus warning.  */
  struct pointer_set_t *weak_released_locks;

  /* Working live lock sets.  These sets are used and updated during the
     analysis of statements.  */
  struct pointer_set_t *live_excl_locks;
  struct pointer_set_t *live_shared_locks;

  /* The outgoing edge that a successful trylock call takes.  */
  edge trylock_live_edge;

  /* Sets of live-out locks acquired by a successful trylock.  */
  struct pointer_set_t *edge_exclusive_locks;
  struct pointer_set_t *edge_shared_locks;
};

/* This data structure is created when we see a function call that is a
   trylock.  A map entry that maps the trylock call to its associated
   trylock_info structure is inserted into trylock_info_map (see below).
   The map (and the trylock_info structure) will later be used when we
   analyze a block-ending if-statement whose condition expression is
   fed by a trylock.  */
struct trylock_info
{
  /* The set of locks the trylock call tries to acquire.  */
  struct pointer_set_t *locks;

  /* Indicating whether the locks acquired by the trylock are exclusive
     (i.e. writer locks) or not.  */
  bool is_exclusive;

  /* The trylock return value that indicates a successful lock
     acquisition.  */
  int succ_retval;
};

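/* For illustration, a hedged sketch of the trylock pattern this structure
   supports.  The attribute spellings come from the list at the top of this
   file; the first trylock attribute argument is assumed to be the value
   returned on a successful acquisition, and the Mutex type and wrappers are
   assumed:

     struct __attribute__ ((lockable)) Mutex { int unused; };
     struct Mutex mu;
     int n __attribute__ ((guarded_by (mu)));

     int try_lock_mu (void) __attribute__ ((exclusive_trylock (1, mu)));
     void unlock_mu (void) __attribute__ ((unlock (mu)));

     void
     maybe_bump (void)
     {
       if (try_lock_mu ())   // the trylock feeds the block-ending condition
         {
           n++;              // mu is live only on this successful edge
           unlock_mu ();
         }
     }

   When the call to try_lock_mu() is analyzed, an entry mapping the call to
   its trylock_info (locks = { mu }, is_exclusive = true, succ_retval = 1)
   is created; mu is added to the lock sets of the successful edge only when
   the enclosing if-statement is processed.  */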
/* Access mode used for indicating whether an access to a shared variable
   is a read or a write.  */
enum access_mode
{
  TSA_READ,
  TSA_WRITE
};

/* True if the parser is currently parsing a lock attribute.  */
bool parsing_lock_attribute = false;

/* A map in which each entry maps a lock, say A, to the set of locks that
   lock A should be acquired after.  This map is first populated when we
   parse the lock declarations that are annotated with "acquired_after"
   or "acquired_before" attributes.  Later, at the beginning of the thread
   safety analysis (see build_transitive_acquired_after_sets()), we
   calculate the transitive closures of the acquired_after sets for the
   locks and modify the map.  For example, if we have global variable
   declarations like the following:

     Mutex mu1;
     Mutex mu2 __attribute__ ((acquired_after(mu1)));
     Mutex mu3 __attribute__ ((acquired_after(mu2)));

   After parsing, the contents of the map are shown below:

     lock    acquired_after set
     --------------------------
     mu2  ->  { mu1 }
     mu3  ->  { mu2 }

   After we call build_transitive_acquired_after_sets(), the map is
   modified as shown below:

     lock    acquired_after set
     --------------------------
     mu2  ->  { mu1 }
     mu3  ->  { mu2, mu1 }  */
struct pointer_map_t *lock_acquired_after_map = NULL;

/* This flag indicates whether the transitive acquired_after sets
   for the locks have been built, so that we only build them once per
   compilation unit.  */
static bool transitive_acq_after_sets_built = false;

/* These two variables are used during the process of building acquired_after
   transitive closures.  A lock is considered finalized (and then added to the
   finalized_locks set) when every member of its current acquired_after set
     (1) is finalized, or
     (2) doesn't have an acquired_after set (i.e. is a root in the partial
         order of the acquired_after relations).

   Once a lock is finalized, we never have to calculate its acquired_after set
   again during the transitive closure building process.  This helps the
   calculation converge faster.

   The reason why we need to use global variables (instead of passing them
   in as parameters) is that we use the pointer_set_traverse routine to visit
   set members, and that routine only takes one additional parameter (besides
   the set and the applied function).  */
static struct pointer_set_t *finalized_locks;
static bool finalized = true;

/* This map contains the locks specified in attributes that couldn't be bound
   to any decl tree in scope when they were parsed.  We try to bind them
   during the analysis.  */
struct pointer_map_t *unbound_lock_map = NULL;

/* A map in which each entry maps a scoped lock to the lock it acquires
   at construction.  An entry is created and added to the map when we see
   the constructor of a scoped lock.  It is later used when we see the
   destructor of the scoped lock, because the destructor doesn't take an
   argument that specifies the lock.  */
static struct pointer_map_t *scopedlock_to_lock_map;

/* Each entry maps a lock to the source location at which it was last
   acquired.  */
static struct pointer_map_t *lock_locus_map;

/* Each entry maps a lock to its canonicalized expression (see
   get_canonical_lock_expr()).  */
static GTY((param_is (union tree_node))) htab_t lock_expr_tab = NULL;

/* Each entry is a gimple call statement.  Calls to the same function with
   symbolically identical arguments will hash to the same entry.  */
static htab_t gimple_call_tab = NULL;

/* Each entry maps a trylock call expr to its trylock_info.  */
static struct pointer_map_t *trylock_info_map;

/* Source location of the currently processed expression.  In our analysis,
   we try to pass the source location around through function parameters.
   However, in the cases where we need to use pointer_set_traverse
   or pointer_map_traverse, this global variable is used.  */
static const location_t *current_loc;

/* Buffer for pretty-printing the lock expressions in warning messages.  */
static pretty_printer pp_buf;

/* Forward declaration.  */
static void analyze_expr (tree, tree, bool, struct pointer_set_t *,
                          struct pointer_set_t *, const location_t *,
                          enum access_mode);

/* This function hashes an expr tree to a hash value by doing the following:
   - for a decl, returns the pointer hash of the tree,
   - for an integer constant, returns the sum of the low and high parts,
   - for other expressions, sums up the hash values of all operands and
     multiplies it by the opcode,
   - for all other trees, returns 0.  */

static hashval_t
lock_expr_hash (const void *exp)
{
  const_tree expr = (const_tree) exp;

  STRIP_NOPS (expr);

  if (DECL_P (expr))
    return htab_hash_pointer (expr);
  else if (TREE_CODE (expr) == INTEGER_CST)
    return (hashval_t) (TREE_INT_CST_LOW (expr) + TREE_INT_CST_HIGH (expr));
  else if (EXPR_P (expr))
    {
      int nops = TREE_OPERAND_LENGTH (expr);
      int i;
      hashval_t sum = 0;
      for (i = 0; i < nops; i++)
        {
          tree op = TREE_OPERAND (expr, i);
          if (op != 0)
            sum += lock_expr_hash (op);
        }
      sum *= (hashval_t) TREE_CODE (expr);
      return sum;
    }
  else
    return 0;
}

/* Given two lock expressions/trees, determine whether they are equal.
   This is basically a wrapper around operand_equal_p, so please see its
   comments for how two expression trees are considered equal
   (in fold-const.c).  */

static int
lock_expr_eq (const void *exp1, const void *exp2)
{
  const_tree expr1 = (const_tree) exp1;
  const_tree expr2 = (const_tree) exp2;

  return operand_equal_p (expr1, expr2, OEP_PURE_SAME);
}

/* This function hashes a gimple call statement to a hash value.
   Calls to the same function will hash to the same value.  */

static hashval_t
call_gs_hash (const void *call)
{
  const_gimple call_gs = (const_gimple) call;
  tree fdecl = gimple_call_fndecl (call_gs);
  if (fdecl)
    return htab_hash_pointer (fdecl);
  else
    {
      tree fn_ptr = gimple_call_fn (call_gs);
      return lock_expr_hash (get_canonical_lock_expr (fn_ptr, NULL_TREE, true,
                                                      NULL_TREE));
    }
}

/* Given two gimple call statements, determine whether they are equal.
   Two calls are considered equal if they call the same function with the
   same arguments (which is determined using operand_equal_p).  This is
   a helper function used by the gimple_call_tab hash table.  */

static int
call_gs_eq (const void *call1, const void *call2)
{
  const_gimple call_gs1 = (const_gimple) call1;
  const_gimple call_gs2 = (const_gimple) call2;
  tree fdecl1 = gimple_call_fndecl (call_gs1);
  tree fdecl2 = gimple_call_fndecl (call_gs2);
  unsigned i, num_args1, num_args2;

  if (call_gs1 == call_gs2)
    return 1;

  if (fdecl1 != fdecl2)
    return 0;

  if (!fdecl1)
    {
      tree fn_ptr1 = get_canonical_lock_expr (gimple_call_fn (call_gs1),
                                              NULL_TREE, true, NULL_TREE);
      tree fn_ptr2 = get_canonical_lock_expr (gimple_call_fn (call_gs2),
                                              NULL_TREE, true, NULL_TREE);
      if (!operand_equal_p (fn_ptr1, fn_ptr2, OEP_PURE_SAME))
        return 0;
    }

  num_args1 = gimple_call_num_args (call_gs1);
  num_args2 = gimple_call_num_args (call_gs2);

  if (num_args1 != num_args2)
    return 0;

  for (i = 0; i < num_args1; ++i)
    {
      tree arg1 = get_canonical_lock_expr (gimple_call_arg (call_gs1, i),
                                           NULL_TREE, true, NULL_TREE);
      tree arg2 = get_canonical_lock_expr (gimple_call_arg (call_gs2, i),
                                           NULL_TREE, true, NULL_TREE);
      if (!operand_equal_p (arg1, arg2, OEP_PURE_SAME))
        return 0;
    }

  return 1;
}

/* This is a helper function passed in (as a parameter) to the
   pointer_set_traverse routine when we traverse the acquired_after set
   of a lock, say lock A, to populate the transitive closure.  It should
   not be called by other functions.  Parameter LOCK is a member of lock A's
   acquired_after set and TRANSITIVE_LOCKS is the set of locks that will
   eventually be added to lock A's acquired_after set.  */

static bool
add_transitive_locks (const void *lock, void *transitive_locks)
{
  void **entry = pointer_map_contains (lock_acquired_after_map, lock);

  if (!entry)
    return true;

  /* Add LOCK's acquired_after set to lock A's transitive closure.  */
  pointer_set_union_inplace ((struct pointer_set_t *) transitive_locks,
                             (struct pointer_set_t *) *entry);

  /* If LOCK, which is a member of lock A's acquired_after set, is not
     finalized, lock A is not finalized.  */
  if (!pointer_set_contains (finalized_locks, lock))
    finalized = false;

  return true;
}

/* This is a helper function passed in (as a parameter) to the
   pointer_map_traverse routine when we traverse lock_acquired_after_map
   to update the acquired_after set for each lock.  It should not be
   called by other functions.

   This function iterates over the members of LOCK's acquired_after set
   (i.e. ACQ_AFTER_SET) and adds their acquired_after sets to
   "transitive_locks", which is then unioned with ACQ_AFTER_SET.
   If any new member is added to ACQ_AFTER_SET, we need to
   set *UPDATED to true so that the main loop that calculates the transitive
   closures will iterate again (see build_transitive_acquired_after_sets()).
   Also, if every member of ACQ_AFTER_SET is finalized, LOCK is also finalized
   and added to the finalized_locks set.  */

static bool
update_acquired_after (const void *lock, void **acq_after_set,
                       void *updated)
{
  struct pointer_set_t *transitive_locks;
  size_t old_num_elements;
  size_t new_num_elements;

  /* Skip locks whose acquired_after set is already finalized.  */
  if (pointer_set_contains (finalized_locks, lock))
    return true;

  transitive_locks = pointer_set_create ();

  /* Before we traverse the acq_after_set, set finalized to true.  If any
     of acq_after_set's members is not finalized, the flag will be set to
     false.  */
  finalized = true;

  pointer_set_traverse ((struct pointer_set_t *) *acq_after_set,
                        add_transitive_locks, transitive_locks);

  /* Before we union transitive_locks with acq_after_set, get the original
     number of members of acq_after_set.  */
  old_num_elements =
      pointer_set_cardinality ((struct pointer_set_t *) *acq_after_set);

  pointer_set_union_inplace ((struct pointer_set_t *) *acq_after_set,
                             transitive_locks);

  new_num_elements =
      pointer_set_cardinality ((struct pointer_set_t *) *acq_after_set);

  gcc_assert (new_num_elements >= old_num_elements);

  /* If the new number of members is greater than the original, which means
     some new members (locks) were added to acq_after_set, set *UPDATED to
     true.  */
  if (new_num_elements > old_num_elements)
    {
      *((bool *) updated) = true;
      if (finalized)
        pointer_set_insert (finalized_locks, lock);
    }
  else
    /* If no new locks were added to ACQ_AFTER_SET, LOCK is also finalized.  */
    pointer_set_insert (finalized_locks, lock);

  pointer_set_destroy (transitive_locks);

  return true;
}

/* This function builds transitive acquired_after sets (i.e. transitive
   closures) for locks and updates the lock_acquired_after_map.  It
   iteratively traverses the lock_acquired_after_map, updating the
   acquired_after sets, until the transitive closures converge.  This
   function is called at most once per compilation unit.  */

static void
build_transitive_acquired_after_sets (void)
{
  bool updated = false;

  finalized_locks = pointer_set_create ();

  while (1)
    {
      pointer_map_traverse (lock_acquired_after_map, update_acquired_after,
                            &updated);
      if (!updated)
        break;
      updated = false;
    }

  pointer_set_destroy (finalized_locks);
}

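/* For illustration only, the same fixed-point idea in a self-contained,
   hedged sketch: plain bitmasks over a fixed number of locks stand in for
   pointer sets, and the finalized-locks shortcut is omitted:

     #include <stdbool.h>

     #define NLOCKS 3               // mu1, mu2, mu3 from the example above

     // after[i] is the bitmask of locks that lock i must be acquired after.
     static unsigned after[NLOCKS] = { 0u, 1u << 0, 1u << 1 };

     static void
     build_transitive_closure (void)
     {
       bool updated = true;
       while (updated)              // iterate until no acquired_after set grows
         {
           updated = false;
           for (int i = 0; i < NLOCKS; i++)
             {
               unsigned before = after[i];
               for (int j = 0; j < NLOCKS; j++)
                 if (after[i] & (1u << j))
                   after[i] |= after[j];   // pull in j's own acquired_after set
               if (after[i] != before)
                 updated = true;
             }
         }
     }

   After the loop terminates, after[2] (mu3) contains both mu1 and mu2,
   matching the transitive closure shown in the comment above
   lock_acquired_after_map.  */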
/* A helper function used by pointer_map_traverse to destroy ACQ_AFTER_SET
   when deleting the lock_acquired_after_map.  */

static bool
destroy_acquired_after_set (const void * ARG_UNUSED (lock),
                            void **acq_after_set, void * ARG_UNUSED (data))
{
  pointer_set_destroy ((struct pointer_set_t *) *acq_after_set);
  return true;
}

/* Function to delete the lock_expr_tab, lock_acquired_after_map, and
   unbound_lock_map.  This is called at the end of a compilation unit.
   (See toplev.c.)  */

void
clean_up_threadsafe_analysis (void)
{
  /* lock_expr_tab is garbage collected.  */
  lock_expr_tab = NULL;

  /* Free the lock acquired_after map and the sets.  */
  if (lock_acquired_after_map)
    {
      pointer_map_traverse (lock_acquired_after_map,
                            destroy_acquired_after_set, NULL);
      pointer_map_destroy (lock_acquired_after_map);
      lock_acquired_after_map = NULL;
    }

  transitive_acq_after_sets_built = false;

  /* Free the unbound lock map.  */
  if (unbound_lock_map)
    {
      pointer_map_destroy (unbound_lock_map);
      unbound_lock_map = NULL;
    }
}

/* Given a BASE object of a field access (i.e. base->a or base->foo()),
   this function tells whether BASE is a "this" pointer (i.e. this->a or
   this->foo()).  */

static bool
is_base_object_this_pointer (tree base)
{
  tree this_ptr;

  if (TREE_CODE (base) != INDIRECT_REF && TREE_CODE (base) != MEM_REF)
    return false;

  this_ptr = TREE_OPERAND (base, 0);

  if (TREE_CODE (this_ptr) == SSA_NAME)
    this_ptr = SSA_NAME_VAR (this_ptr);

  if (TREE_CODE (this_ptr) == PARM_DECL
      && DECL_NAME (this_ptr) == maybe_get_identifier ("this"))
    return true;
  else
    return false;
}

/* Get the function declaration from a gimple call stmt (CALL).  This handles
   both ordinary function calls and virtual methods.  */

static tree
get_fdecl_from_gimple_stmt (gimple call)
{
  tree fdecl = gimple_call_fndecl (call);
  /* If the callee fndecl is NULL, check if it is a virtual function,
     and if so, try to get its decl through the reference object.  */
  if (!fdecl)
    {
      tree callee = gimple_call_fn (call);
      if (TREE_CODE (callee) == OBJ_TYPE_REF)
        {
          tree objtype = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          /* Check to make sure objtype is a valid type.
             OBJ_TYPE_REF_OBJECT does not always return the correct static
             type of the callee.  For example, given

               foo (void* ptr) { ((Foo*) ptr)->doSomething(); }

             objtype will be void, not Foo.  Whether or not this happens
             depends on the details of how a particular call is lowered to
             GIMPLE, and there is no easy fix that works in all cases.  For
             now, we simply rely on gcc's type information; if that
             information is not accurate, then the analysis will be less
             precise.  */
          if (TREE_CODE (objtype) == RECORD_TYPE)
            fdecl = lang_hooks.get_virtual_function_decl (callee, objtype);
        }
    }
  return fdecl;
}

/* Given a CALL gimple statement, check if its function decl is annotated
   with the "lock_returned" attribute.  If so, return the lock specified in
   the attribute.  Otherwise, return NULL_TREE.  */

static tree
get_lock_returned_by_call (gimple call)
{
  tree fdecl = get_fdecl_from_gimple_stmt (call);
  tree attr = (fdecl
               ? lookup_attribute ("lock_returned", DECL_ATTRIBUTES (fdecl))
               : NULL_TREE);
  if (attr)
    {
      gcc_assert (TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return TREE_VALUE (TREE_VALUE (attr));
    }
  else
    return NULL_TREE;
}

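/* For illustration, a hedged sketch of the annotation this helper consumes
   (the Mutex type and the accessor are assumed, not defined by GCC):

     struct __attribute__ ((lockable)) Mutex { int unused; };
     struct Mutex mu;
     int v __attribute__ ((guarded_by (mu)));

     // The analysis treats the value returned by get_mu() as an alias of mu,
     // so code that locks through the accessor still satisfies guarded_by(mu).
     struct Mutex *get_mu (void) __attribute__ ((lock_returned (mu)));

   A lock acquired through the accessor's return value is then canonicalized
   back to mu when get_canonical_lock_expr() (below) folds the call.  */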
/* Given a lock expression (LOCKABLE), this function returns the
   var/field/parm decl part of the lockable.  For example, if the lockable
   is a[2].foo->mu, it returns the decl tree of mu.  */

static tree
get_lockable_decl (tree lockable)
{
  switch (TREE_CODE (lockable))
    {
    case VAR_DECL:
    case FIELD_DECL:
    case PARM_DECL:
      {
        /* If the lockable is a compiler-generated temp variable that
           has a debug expr specifying the original var decl (see
           lookup_tmp_var() in gimplify.c), return the original var decl.  */
        if (DECL_ARTIFICIAL (lockable)
            && (DECL_DEBUG_EXPR_IS_FROM (lockable)
                && DECL_DEBUG_EXPR (lockable)))
          {
            lockable = DECL_DEBUG_EXPR (lockable);
            gcc_assert (DECL_P (lockable));
          }
        return lockable;
      }
    case ADDR_EXPR:
      /* Handle the case of mu.Lock(), i.e. Lock(&mu).  */
      return get_lockable_decl (TREE_OPERAND (lockable, 0));
    case SSA_NAME:
      {
        /* If the lockable is an SSA_NAME of a temp variable (with or
           without a name), we can get the original variable decl
           by back-tracing its SSA def (as shown in the following example).
             D.2_1 = &this->mu;
             Lock (D.2_1);
           Note that the SSA name doesn't always have a def statement
           (e.g. "this" pointer).  */
        tree vdecl = SSA_NAME_VAR (lockable);
        if (DECL_ARTIFICIAL (vdecl)
            && !gimple_nop_p (SSA_NAME_DEF_STMT (lockable)))
          {
            gimple def_stmt = SSA_NAME_DEF_STMT (lockable);
            if (is_gimple_assign (def_stmt)
                && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
                    == GIMPLE_SINGLE_RHS))
              return get_lockable_decl (gimple_assign_rhs1 (def_stmt));
            else if (is_gimple_call (def_stmt))
              return get_lock_returned_by_call (def_stmt);
            else
              return get_lockable_decl (vdecl);
          }
        else
          return get_lockable_decl (vdecl);
      }
    case COMPONENT_REF:
      /* Handle the case of Foo.mu.Lock() or Foo->mu.Lock().  */
      return get_lockable_decl (TREE_OPERAND (lockable, 1));
    case ARRAY_REF:
      return get_lockable_decl (TREE_OPERAND (lockable, 0));
    default:
      return NULL_TREE;
    }
}

/* Build a fully-qualified name of a lock that is a class member with the
   given BASE object tree and the LOCK_FIELD tree.  This helper function is
   usually used when handling lock_returned, lock, and unlock attributes.
   For example, given the following code

     class Bar {
      public:
       bool MyLock() __attribute__ ((exclusive_lock(mu1_)));
       void MyUnlock() __attribute__ ((unlock(mu1_)));
       int a_ __attribute__ ((guarded_by(mu1_)));
      private:
       Mutex mu1_;
     };

     Bar *b1, *b2;

     void func()
     {
       b1->MyLock();    // S1
       b1->a_ = 5;      // S2
       b2->a_ = 3;      // S3
       b1->MyUnlock();  // S4
     }

   When analyzing statement S1, instead of adding "mu1_" to the live lock
   set, we need to build the fully-qualified name, b1->mu1_, first and add
   the fully-qualified name to the live lock set.  The same goes for the
   unlock statement in S4.  Without using the fully-qualified lock names, we
   wouldn't be able to tell the lock requirement difference between S2 and
   S3.  */

static tree
build_fully_qualified_lock (tree lock_field, tree base)
{
  tree lock;
  tree canon_base = get_canonical_lock_expr (base, NULL_TREE,
                                             true /* is_temp_expr */,
                                             NULL_TREE);

  /* When the base is a pointer, i.e. b1->MyLock() (or MyLock(base)
     internally), we need to create a new base that is INDIRECT_REF so that
     we can form a correct fully-qualified lock expression with the
     lock_field (e.g. b1->lock_field).  On the other hand, if the base is an
     address-taken operation (i.e. base.foo() or foo(&base)), we need to get
     rid of the ADDR_EXPR operator before we form the new lock expression.  */
  if (TREE_CODE (canon_base) != ADDR_EXPR)
    {
      /* Note that if the base object is neither an ADDR_EXPR, nor a pointer,
         most likely we have done the IPA-SRA optimization and the DECL is a
         cloned method, where reference parameters are changed to be passed
         by value.  So in this case, we don't need to do anything.  */
      if (POINTER_TYPE_P (TREE_TYPE (canon_base)))
        canon_base = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (canon_base)),
                             canon_base);
    }
  else
    canon_base = TREE_OPERAND (canon_base, 0);

  lock = get_canonical_lock_expr (lock_field, canon_base,
                                  false /* is_temp_expr */, NULL_TREE);

  return lock;
}

/* Given a lock expression, this function returns a canonicalized
   expression (for matching locks in the analysis).  Basically this
   function does the following to canonicalize the expression:

   - Fold temp variables.  e.g.

       D.1 = &q->mu;        ==>    foo (&q->mu);
       foo (D.1);

   - Fold SSA names.  e.g.

       q.0_1 = q;           ==>    q->mu;
       q.0_1->mu;

   - Replace function calls that return a lock with the actual lock returned
     (if the callee is annotated with the "lock_returned" attribute).

   - Subexpressions of the lock are canonicalized recursively.

   When matching two expressions, we currently only do it symbolically.
   That is, if two different pointers, say p and q, point to the same
   location, they will not map to the same canonical expr.

   This function can also be called to get a canonical form of an expression
   which may not be a lock, for example, when trying to canonicalize the base
   object expression.  In that case, IS_TEMP_EXPR is set to true so that
   the expression will not be inserted into the LOCK_EXPR_TAB.

   When this function is called with a non-null NEW_LEFTMOST_BASE_VAR and the
   leftmost base of LOCK is a PARM_DECL, we are trying to replace a formal
   parameter with an actual argument (i.e. NEW_LEFTMOST_BASE_VAR).  This
   function will return an expression whose leftmost base/operand is replaced
   with the given new base var.  For example, if LOCK is a->b[3]->c and
   NEW_LEFTMOST_BASE_VAR is x, the function will return (the canonical form
   of) x->b[3]->c.  */

tree
get_canonical_lock_expr (tree lock, tree base_obj, bool is_temp_expr,
                         tree new_leftmost_base_var)
{
  hashval_t hash;
  tree canon_lock;
  void **slot;

  switch (TREE_CODE (lock))
    {
    case PARM_DECL:
      if (new_leftmost_base_var)
        return new_leftmost_base_var;
      /* Fall through to the following case.  */
    case VAR_DECL:
      {
        /* If the lock is a compiler-generated temp variable that
           has a debug expr specifying the original var decl (see
           lookup_tmp_var() in gimplify.c), return the original var decl.  */
        if (DECL_ARTIFICIAL (lock)
            && (DECL_DEBUG_EXPR_IS_FROM (lock)
                && DECL_DEBUG_EXPR (lock)))
          {
            lock = DECL_DEBUG_EXPR (lock);
            gcc_assert (DECL_P (lock));
          }
        return lock;
      }
    case FIELD_DECL:
      {
        /* If the LOCK is a field decl and BASE_OBJ is not NULL, build a
           COMPONENT_REF expression for the canonical lock.  */
        if (base_obj)
          {
            tree full_lock = build3 (COMPONENT_REF, TREE_TYPE (lock),
                                     base_obj, lock, NULL_TREE);
            lock = get_canonical_lock_expr (full_lock, NULL_TREE,
                                            true /* is_temp_expr */,
                                            NULL_TREE);
          }
        return lock;
      }
    case SSA_NAME:
      {
        /* If the lock is an SSA_NAME of a temp variable (with or
           without a name), we can possibly get the original variable decl
           by back-tracing its SSA def (as shown in the following example).

             D.2_1 = &this->mu;
             Lock (D.2_1);

           Note that the SSA name doesn't always have a def statement
           (e.g. "this" pointer).  */
        tree vdecl = SSA_NAME_VAR (lock);
        if (DECL_ARTIFICIAL (vdecl)
            && !gimple_nop_p (SSA_NAME_DEF_STMT (lock)))
          {
            gimple def_stmt = SSA_NAME_DEF_STMT (lock);
            if (is_gimple_assign (def_stmt))
              {
                enum gimple_rhs_class gcls =
                    get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt));
                tree rhs = NULL_TREE;

                if (gcls == GIMPLE_SINGLE_RHS)
                  rhs = gimple_assign_rhs1 (def_stmt);
                else if (gcls == GIMPLE_UNARY_RHS)
                  rhs = build1 (gimple_assign_rhs_code (def_stmt),
                                TREE_TYPE (gimple_assign_lhs (def_stmt)),
                                gimple_assign_rhs1 (def_stmt));
                else if (gcls == GIMPLE_BINARY_RHS)
                  rhs = build2 (gimple_assign_rhs_code (def_stmt),
                                TREE_TYPE (gimple_assign_lhs (def_stmt)),
                                gimple_assign_rhs1 (def_stmt),
                                gimple_assign_rhs2 (def_stmt));
                if (rhs)
                  return get_canonical_lock_expr (rhs, base_obj,
                                                  is_temp_expr, NULL_TREE);
              }
            else if (is_gimple_call (def_stmt))
              {
                tree fdecl = get_fdecl_from_gimple_stmt (def_stmt);
                tree real_lock = get_lock_returned_by_call (def_stmt);
                if (real_lock)
                  {
                    gcc_assert (fdecl);
                    if (TREE_CODE (TREE_TYPE (fdecl)) == METHOD_TYPE)
                      {
                        tree base = gimple_call_arg (def_stmt, 0);
                        lock = build_fully_qualified_lock (real_lock, base);
                      }
                    else
                      lock = real_lock;
                    break;
                  }
                /* We deal with a lockable object wrapped in a smart pointer
                   here.  For example, given the following code

                     auto_ptr<Mutex> mu;
                     mu->Lock();

                   we would like to ignore the "operator->" (or "operator.")
                   and simply return mu.  We also treat the "get" method of
                   a smart pointer the same as operator->.  And we only do it
                   when LOCK is indeed a lock expr, not some temp expr.  */
                else if (fdecl
                         && ((DECL_NAME (fdecl)
                              == maybe_get_identifier ("operator->"))
                             || (DECL_NAME (fdecl)
                                 == maybe_get_identifier ("operator."))
                             || (DECL_NAME (fdecl)
                                 == maybe_get_identifier ("get")))
                         && !is_temp_expr
                         && POINTER_TYPE_P (TREE_TYPE (lock))
                         && lookup_attribute ("lockable", TYPE_ATTRIBUTES (
                                TREE_TYPE (TREE_TYPE (lock)))))
                  {
                    tree arg = gimple_call_arg (def_stmt, 0);
                    tree canon_arg = get_canonical_lock_expr (
                        arg, base_obj, false /* is_temp_expr */, NULL_TREE);
                    if (TREE_CODE (canon_arg) == ADDR_EXPR)
                      lock = TREE_OPERAND (canon_arg, 0);
                    break;
                  }

                /* For a gimple call statement not annotated with the
                   "lock_returned" attr, try to get the canonical lhs of
                   the statement.  */
                hash = call_gs_hash (def_stmt);
                if (hash)
                  {
                    gimple canon_call = (gimple) htab_find_with_hash (
                        gimple_call_tab, def_stmt, hash);
                    if (!canon_call)
                      {
                        slot = htab_find_slot_with_hash (gimple_call_tab,
                                                         def_stmt, hash,
                                                         INSERT);
                        *slot = def_stmt;
                        canon_call = def_stmt;
                      }
                    lock = gimple_call_lhs (canon_call);
                    break;
                  }
              }
          }
        return get_canonical_lock_expr (vdecl, base_obj, is_temp_expr,
                                        NULL_TREE);
      }
    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (lock, 0);
        tree canon_base;
        /* For expressions that denote locks, when the expr is a pointer to
           a lockable type (i.e. mu.Lock() or Lock(&mu) internally), we don't
           need the address-taken operator (&).  */
        if (!is_temp_expr
            && lookup_attribute ("lockable",
                                 TYPE_ATTRIBUTES (TREE_TYPE (base))))
          return get_canonical_lock_expr (base, base_obj,
                                          false /* is_temp_expr */,
                                          new_leftmost_base_var);
        canon_base = get_canonical_lock_expr (base, NULL_TREE,
                                              true /* is_temp_expr */,
                                              new_leftmost_base_var);
        if (base != canon_base)
          lock = build1 (ADDR_EXPR, TREE_TYPE (lock), canon_base);
        break;
      }
    case COMPONENT_REF:
      {
        /* Handle the case of Foo.mu.Lock() or Foo->mu.Lock().
           If the base is the "this" pointer or a base class, get the
           component only.  */
        tree base = TREE_OPERAND (lock, 0);
        tree component = TREE_OPERAND (lock, 1);
        tree canon_base;
        if (is_base_object_this_pointer (base))
          return get_canonical_lock_expr (component, NULL_TREE, is_temp_expr,
                                          NULL_TREE);

        canon_base = get_canonical_lock_expr (base, base_obj,
                                              true /* is_temp_expr */,
                                              new_leftmost_base_var);

        /* If either the base or the component is a compiler-generated base
           object field, skip it.  For example, if a lock expression is
           foo->D.2801.mu, where D.2801 is the base field in foo which is
           a derived class, we want the canonical form of the lock to be
           foo->mu.  */
        if (lang_hooks.decl_is_base_field (canon_base))
          return get_canonical_lock_expr (component, NULL_TREE, is_temp_expr,
                                          NULL_TREE);

        if (lang_hooks.decl_is_base_field (component))
          {
            if (is_temp_expr)
              return canon_base;
            else
              /* Return canon_base, but recalculate it so that it is stored
                 in the hash table.  */
              return get_canonical_lock_expr (base, base_obj,
                                              false /* is_temp_expr */,
                                              new_leftmost_base_var);
          }

        if (base != canon_base)
          lock = build3 (COMPONENT_REF, TREE_TYPE (component),
                         canon_base, component, NULL_TREE);
        break;
      }
    case ARRAY_REF:
      {
        tree array = TREE_OPERAND (lock, 0);
        tree canon_array = get_canonical_lock_expr (array, base_obj,
                                                    true /* is_temp_expr */,
                                                    new_leftmost_base_var);
        tree index = TREE_OPERAND (lock, 1);
        tree canon_index = (TREE_CODE (index) == INTEGER_CST
                            ? index
                            : get_canonical_lock_expr (index, NULL_TREE,
                                                       true /* is_temp_expr */,
                                                       NULL_TREE));
        if (array != canon_array || index != canon_index)
          lock = build4 (ARRAY_REF, TREE_TYPE (lock), canon_array,
                         canon_index, TREE_OPERAND (lock, 2),
                         TREE_OPERAND (lock, 3));
        break;
      }
    case INDIRECT_REF:
    case MEM_REF:
      {
        tree base = TREE_OPERAND (lock, 0);
        tree canon_base = get_canonical_lock_expr (base, base_obj,
                                                   true /* is_temp_expr */,
                                                   new_leftmost_base_var);

        /* If CANON_BASE is an ADDR_EXPR (e.g. &a), doing an indirect or
           memory reference on top of it is equivalent to accessing the
           variable itself.  That is, *(&a) == a.  So if that's the case,
           simply return the variable.  Otherwise, build an indirect ref
           expression.  */
        if (TREE_CODE (canon_base) == ADDR_EXPR)
          lock = TREE_OPERAND (canon_base, 0);
        else
          lock = build1 (INDIRECT_REF,
                         TREE_TYPE (TREE_TYPE (canon_base)), canon_base);
        break;
      }
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MULT_EXPR:
      {
        tree left = TREE_OPERAND (lock, 0);
        tree canon_left = get_canonical_lock_expr (left, base_obj,
                                                   true /* is_temp_expr */,
                                                   NULL_TREE);

        tree right = TREE_OPERAND (lock, 1);
        tree canon_right = get_canonical_lock_expr (right, base_obj,
                                                    true /* is_temp_expr */,
                                                    NULL_TREE);
        if (left != canon_left || right != canon_right)
          lock = build2 (TREE_CODE (lock), TREE_TYPE (lock),
                         canon_left, canon_right);
        break;
      }
    default:
      break;
    }

  hash = lock_expr_hash (lock);

  /* Return the original lock expr if the lock expr is not something we can
     handle now.  */
  if (hash == 0)
    return lock;

  /* Now that we have built a canonical lock expr, check whether it's already
     in the lock_expr_tab.  If so, grab and return it.  Otherwise, insert the
     new lock expr into the map.  */
  if (lock_expr_tab == NULL)
    lock_expr_tab = htab_create_ggc (10, lock_expr_hash, lock_expr_eq, NULL);

  canon_lock = (tree) htab_find_with_hash (lock_expr_tab, lock, hash);
  if (canon_lock)
    return canon_lock;

  /* If the lock is created temporarily (e.g. to form a full-path
     lock name), don't insert it in the lock_expr_tab as the lock
     tree will be manually freed later.  */
  if (!is_temp_expr)
    {
      slot = htab_find_slot_with_hash (lock_expr_tab, lock, hash, INSERT);
      *slot = lock;
    }

  return lock;
}

/* Dump the LOCK name/expr as a character string to OUT_BUF.  If LOCK is a
   simple decl, we just use the identifier node of the lock.  Otherwise,
   we use the tree pretty-print mechanism to do that.  */

const char*
dump_expr_tree (tree lock, char *out_buf)
{
  if (DECL_P (lock) && DECL_NAME (lock))
    snprintf (out_buf, LOCK_NAME_LEN, "'%s'",
              IDENTIFIER_POINTER (DECL_NAME (lock)));
  else
    {
      pp_clear_output_area (&pp_buf);
      dump_generic_node (&pp_buf, lock, 0, TDF_DIAGNOSTIC, false);
      snprintf (out_buf, LOCK_NAME_LEN, "'%s'",
                pp_base_formatted_text (&pp_buf));
    }
  return out_buf;
}

/* A helper function that checks if the left-most operand of
   EXPR is a field decl, and if so, returns true.  For example, if EXPR is
   'a.b->c[2]', it will check whether 'a' is a field decl.  */

static bool
leftmost_operand_is_field_decl (tree expr)
{
  if (TREE_CODE (get_leftmost_base_var (expr)) == FIELD_DECL)
    return true;
  else
    return false;
}

/* Check whether the given LOCK is a member of LOCK_SET and return the lock
   contained in the set if so.  This check is more complicated than just
   calling pointer_set_contains with LOCK and LOCK_SET because we need to
   get the canonical form of the lock.  Also the LOCK_SET may contain the
   universal lock (i.e. error_mark_node).  IGNORE_UNIVERSAL_LOCK indicates
   whether to ignore it.  In order to be conservative (not to emit false
   positives), we don't want to ignore the universal lock when checking for
   locks required, but should ignore it when checking for locks excluded.  */

static tree
lock_set_contains (const struct pointer_set_t *lock_set, tree lock,
                   tree base_obj, bool ignore_universal_lock)
{
  /* If the universal lock is in the LOCK_SET and it is not to be ignored,
     just assume the LOCK is in the LOCK_SET and return it.  */
  if (!ignore_universal_lock
      && pointer_set_contains (lock_set, error_mark_node))
    return lock;

  /* If the lock is a field and the base is not the 'this' pointer nor a base
     class, we need to check the lock set with the fully-qualified lock name.
     Otherwise, we could be confused by the same lock field of a different
     object.  */
  if (leftmost_operand_is_field_decl (lock)
      && base_obj != NULL_TREE
      && !is_base_object_this_pointer (base_obj)
      && !lang_hooks.decl_is_base_field (base_obj))
    {
      /* The canonical lock is a fully-qualified name.  */
      tree canonical_lock = get_canonical_lock_expr (lock, base_obj,
                                                     true /* is_temp_expr */,
                                                     NULL_TREE);
      tree result = (pointer_set_contains (lock_set, canonical_lock)
                     ? canonical_lock : NULL_TREE);
      return result;
    }
  /* Check the lock set with the given lock directly as it could already be
     in canonical form.  */
  else if (pointer_set_contains (lock_set, lock))
    return lock;
  /* If the lock is not yet bound to a decl, try to bind it now.  */
  else if (TREE_CODE (lock) == IDENTIFIER_NODE)
    {
      void **entry;
      /* If there is any unbound lock in the attribute, the unbound lock map
         must not be null.  */
      gcc_assert (unbound_lock_map);
      entry = pointer_map_contains (unbound_lock_map, lock);
      gcc_assert (entry);
      if (*entry)
        {
          tree lock_decl = (tree) *entry;
          gcc_assert (TREE_CODE (lock_decl) == VAR_DECL
                      || TREE_CODE (lock_decl) == PARM_DECL
                      || TREE_CODE (lock_decl) == FIELD_DECL);
          if (pointer_set_contains (lock_set, lock_decl))
            return lock_decl;
          else
            return NULL_TREE;
        }
      else
        return NULL_TREE;
    }
  else
    return NULL_TREE;
}

/* This function checks whether LOCK is in the current live lock sets
   (EXCL_LOCKS and SHARED_LOCKS) and emits a warning message if it's not.
   This function is called when analyzing an expression that either accesses
   a shared variable or calls a function whose DECL is annotated with
   guarded_by, point_to_guarded_by, or {exclusive|shared}_locks_required
   attributes.

   IS_INDIRECT_REF indicates whether the (variable) access is indirect or not.

   LOCUS specifies the source location of the expression that accesses the
   shared variable or calls the guarded function.

   MODE indicates whether the access is a read or a write.  */

static void
check_lock_required (tree lock, tree decl, tree base_obj, bool is_indirect_ref,
                     const struct pointer_set_t *excl_locks,
                     const struct pointer_set_t *shared_locks,
                     const location_t *locus, enum access_mode mode)
{
  const char *msg;
  char dname[LOCK_NAME_LEN], lname[LOCK_NAME_LEN];

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      gcc_assert (!is_indirect_ref);
      msg = G_("Calling function");
      /* When the base obj tree is not an ADDR_EXPR, which means it is a
         pointer (i.e. base->foo(), or foo(base) internally), we will need
         to create a new base that is INDIRECT_REF so that we would be able
         to form a correct full expression for a lock later.  On the other
         hand, if the base obj is an ADDR_EXPR (i.e. base.foo(), or
         foo(&base) internally), we need to remove the address-taken
         operation.  Note that this is an issue only for class member
         functions.  If DECL is a class field, the base_obj is good.  */
      if (base_obj)
        {
          tree canon_base = get_canonical_lock_expr (base_obj, NULL_TREE,
                                                     true /* is_temp_expr */,
                                                     NULL_TREE);
          if (TREE_CODE (canon_base) != ADDR_EXPR)
            {
              if (POINTER_TYPE_P (TREE_TYPE (canon_base)))
                base_obj = build1 (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (canon_base)),
                                   canon_base);
              /* If the base object is neither an ADDR_EXPR, nor a pointer,
                 and DECL is a cloned method, most likely we have done IPA-SRA
                 optimization, where reference parameters are changed to be
                 passed by value.  So in this case, just use the CANON_BASE.  */
              else if (DECL_ABSTRACT_ORIGIN (decl))
                base_obj = canon_base;
              else
                gcc_unreachable ();
            }
          else
            base_obj = TREE_OPERAND (canon_base, 0);
        }
    }
  else
    {
      if (mode == TSA_READ)
        msg = G_("Reading variable");
      else
        msg = G_("Writing to variable");
    }

  /* We want to use fully-qualified expressions (i.e. including base_obj
     if any) for DECL when emitting warning messages.  */
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      if (base_obj)
        {
          tree full_decl = build3 (COMPONENT_REF, TREE_TYPE (decl),
                                   base_obj, decl, NULL_TREE);
          decl = get_canonical_lock_expr (full_decl, NULL_TREE,
                                          true /* is_temp_expr */, NULL_TREE);
        }
    }
  else
    /* If DECL is a function, call DECL_ORIGIN first in case it is a clone so
       that we can avoid using the cloned name in the warning messages.  */
    decl = DECL_ORIGIN (decl);

  if (!lock)
    {
      /* If LOCK is NULL, either the attribute is a "guarded" attribute that
         doesn't specify a particular lock, or the lock name/expression
         is not supported.  Just check whether there is any live lock at this
         point.  */
      if (pointer_set_cardinality (excl_locks) == 0)
        {
          if (pointer_set_cardinality (shared_locks) == 0)
            {
              if (is_indirect_ref)
                warning_at (*locus, OPT_Wthread_safety,
                            G_("Access to memory location pointed to by"
                               " variable %s requires a lock"),
                            dump_expr_tree (decl, dname));
              else
                warning_at (*locus,
                            OPT_Wthread_safety, G_("%s %s requires a lock"),
                            msg, dump_expr_tree (decl, dname));
            }
          else
            {
              if (mode == TSA_WRITE)
                {
                  if (is_indirect_ref)
                    warning_at (*locus, OPT_Wthread_safety,
                                G_("Writing to memory location pointed to by"
                                   " variable %s requires an exclusive lock"),
                                dump_expr_tree (decl, dname));
                  else
                    warning_at (*locus, OPT_Wthread_safety,
                                G_("%s %s requires an exclusive lock"),
                                msg, dump_expr_tree (decl, dname));
                }
            }
        }
      return;
    }

  if (!DECL_P (lock))
    lock = get_canonical_lock_expr (lock, NULL_TREE, false /* is_temp_expr */,
                                    NULL_TREE);

  if (!lock_set_contains (excl_locks, lock, base_obj, false))
    {
      if (!lock_set_contains (shared_locks, lock, base_obj, false))
        {
          /* We want to use fully-qualified expressions (i.e. including
             base_obj if any) for LOCK when emitting warning
             messages.  */
          if (base_obj)
            {
              if (TREE_CODE (lock) == FIELD_DECL)
                {
                  tree full_lock = build3 (COMPONENT_REF, TREE_TYPE (lock),
                                           base_obj, lock, NULL_TREE);
                  /* Get the canonical lock tree.  */
                  lock = get_canonical_lock_expr (full_lock, NULL_TREE,
                                                  true /* is_temp_expr */,
                                                  NULL_TREE);
                }
            }
          if (is_indirect_ref)
            warning_at (*locus, OPT_Wthread_safety,
                        G_("Access to memory location pointed to by"
                           " variable %s requires lock %s"),
                        dump_expr_tree (decl, dname),
                        dump_expr_tree (lock, lname));
          else
            warning_at (*locus, OPT_Wthread_safety,
                        G_("%s %s requires lock %s"),
                        msg, dump_expr_tree (decl, dname),
                        dump_expr_tree (lock, lname));
        }
      else
        {
          if (mode == TSA_WRITE)
            {
              if (base_obj)
                {
                  /* We want to use fully-qualified expressions (i.e.
                     including base_obj if any) for LOCK when
                     emitting warning messages.  */
                  if (TREE_CODE (lock) == FIELD_DECL)
                    {
                      tree full_lock = build3 (COMPONENT_REF, TREE_TYPE (lock),
                                               base_obj, lock, NULL_TREE);
                      /* Get the canonical lock tree.  */
                      lock = get_canonical_lock_expr (full_lock, NULL_TREE,
                                                      true /* is_temp_expr */,
                                                      NULL_TREE);
                    }
                }
              if (is_indirect_ref)
                warning_at (*locus, OPT_Wthread_safety,
                            G_("Writing to memory location pointed to by"
                               " variable %s requires exclusive lock %s"),
                            dump_expr_tree (decl, dname),
                            dump_expr_tree (lock, lname));
              else
                warning_at (*locus, OPT_Wthread_safety,
                            G_("%s %s requires exclusive lock %s"),
                            msg, dump_expr_tree (decl, dname),
                            dump_expr_tree (lock, lname));
            }
        }
    }
}

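/* As a concrete, hedged illustration of the diagnostics above (the Mutex
   type and the wrapper functions are assumed, and the quoted message is the
   expected shape of the warning, not verbatim compiler output):

     struct __attribute__ ((lockable)) Mutex { int unused; };
     struct Mutex mu;
     int counter __attribute__ ((guarded_by (mu)));

     void acquire (void) __attribute__ ((exclusive_lock (mu)));
     void release (void) __attribute__ ((unlock (mu)));

     void
     good (void)
     {
       acquire ();
       counter++;      // OK: mu is in the live exclusive lock set here
       release ();
     }

     void
     bad (void)
     {
       counter++;      // expected: Writing to variable 'counter' requires lock 'mu'
     }

   No lock is live in bad(), so check_lock_required() reports the write.  */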
/* This data structure is created to overcome the limitation of the
   pointer_set_traverse routine, which only takes one data pointer argument.
   Unfortunately when we are trying to decide whether a lock (with an optional
   base object) is in a set or not, we need 2 input parameters and 1
   output parameter.  Therefore we use the following data structure.  */

struct lock_match_info
{
  /* The lock which we want to check if it is in the acquired_after set.  */
  tree lock;

  /* The base object of the lock if the lock is a class member.  */
  tree base;

  /* Whether we found a match or not.  */
  bool match;
};

/* This is a helper function passed in (as a parameter) to the
   pointer_set_traverse routine we invoke to traverse the acquired_after
   set to find a match for the lock recorded in the match info
   (parameter INFO).  This function should not be called by other functions.
   Parameter LOCK is a member of the acquired_after set.
   If LOCK is a class field, we reconstruct the LOCK name by
   combining it with the base object (recorded in INFO) and do a match.
   If we find a match, record the result in INFO->match and return false
   so that pointer_set_traverse will stop iterating through the rest of
   the set.  Also see the comments for function acquired_after_set_contains()
   below.  */

static bool
match_locks (const void *lock, void *info)
{
  struct lock_match_info *match_info = (struct lock_match_info *) info;
  tree acq_after_lock = CONST_CAST_TREE ((const_tree) lock);
  bool result = true;

  if (TREE_CODE (acq_after_lock) == FIELD_DECL)
    {
      tree tmp_lock;
      gcc_assert (match_info->base);
      tmp_lock = build3 (COMPONENT_REF, TREE_TYPE (acq_after_lock),
                         match_info->base, acq_after_lock, NULL_TREE);
      if (lock_expr_eq (tmp_lock, match_info->lock))
        {
          match_info->match = true;
          result = false;
        }
      /* Since we know tmp_lock is not going to be used any more, we might
         as well free it even though it's not strictly necessary.  */
      ggc_free (tmp_lock);
    }

  return result;
}

/* Check if the LOCK is in the ACQ_AFTER_SET.  This check is more complicated
   than simply calling pointer_set_contains to see whether ACQ_AFTER_SET
   contains LOCK, because the ACQ_AFTER_SET may contain only the "bare"
   name of the LOCK.  For example, suppose we have the following code:

     class Foo {
      public:
       Mutex mu1;
       Mutex mu2 __attribute__ ((acquired_after(mu1)));
     };

     main()
     {
       Foo *foo = new Foo();

       foo->mu1.Lock();

       foo->mu2.Lock();
       ...
     }

   The lock_acquired_after_map would be

     lock    acquired_after set
     --------------------------
     mu2  ->  { mu1 }

   In our analysis, when we look at foo->mu2.Lock() and want to know whether
   foo->mu1 (which was acquired earlier) is in mu2's acquired_after set
   (in this case, ACQ_AFTER_SET = { mu1 }, LOCK = foo->mu1, BASE = foo),
   a call to pointer_set_contains(mu2_acquired_after_set, foo->mu1) would
   return false as it is "mu1", not "foo->mu1", in mu2's acquired_after set.
   Therefore we need to iterate through each member of mu2's
   acquired_after set, reconstructing the lock name with the BASE (which is
   foo in this example), and check again.  */

static bool
acquired_after_set_contains (const struct pointer_set_t *acq_after_set,
                             tree lock, tree base)
{
  struct lock_match_info *info;
  bool result;

  if (pointer_set_contains (acq_after_set, lock))
    return true;
  else if (base == NULL_TREE)
    return false;

  /* Now that a simple call to pointer_set_contains returns false and
     the LOCK appears to be a class member (as BASE is not null),
     we need to look at each element of ACQ_AFTER_SET, reconstructing
     their names, and check again.  */
  info = XNEW (struct lock_match_info);
  info->lock = lock;
  info->base = base;
  info->match = false;

  pointer_set_traverse (acq_after_set, match_locks, info);

  result = info->match;

  XDELETE (info);

  return result;
}

/* Returns the base object if EXPR is a component ref tree,
   NULL_TREE otherwise.

   Note that this routine is different from get_base_address or
   get_base_var in that, for example, if we have an expression x[5].a,
   this routine will return x[5], while the other two routines will
   return x.  Also if the expr is b[3], this routine will return NULL_TREE
   while the other two will return b.  */

static tree
get_component_ref_base (tree expr)
{
  if (TREE_CODE (expr) == COMPONENT_REF)
    return TREE_OPERAND (expr, 0);
  else if (TREE_CODE (expr) == ARRAY_REF)
    return get_component_ref_base (TREE_OPERAND (expr, 0));
  else
    return NULL_TREE;
}

/* Given an expression, EXPR, returns the leftmost operand/base of EXPR.
   For example, if EXPR is 'a.b->c[2]', it will return 'a'.  Unlike
   get_base_var, this routine allows the leftmost var to be a field decl.  */

tree
get_leftmost_base_var (tree expr)
{
  while (EXPR_P (expr))
    expr = TREE_OPERAND (expr, 0);
  return expr;
}

/* This is a helper function passed in (as a parameter) to
   pointer_set_traverse when we traverse live lock sets to check for the
   acquired_after requirement.
   This function should not be called by other functions.  The parameter
   LIVE_LOCK is a member of the live lock set we are traversing, and parameter
   LOCK is the lock we are about to add to the live lock set.
   In this function, we first check if LIVE_LOCK is in the acquired_after
   set of LOCK.  If so, that's great (but we will also check whether LOCK is
   in LIVE_LOCK's acquired_after set to see if there is a cycle in the
   acquired_after relationship).  Otherwise, we will emit a warning.  */

static bool
check_acquired_after (const void *live_lock, void *lock)
{
  char lname1[LOCK_NAME_LEN], lname2[LOCK_NAME_LEN];
  tree lock_decl;
  tree base;
  void **entry;
  tree live_lock_tree = CONST_CAST_TREE ((const_tree) live_lock);
  tree live_lock_decl;
  bool live_lock_in_locks_acq_after_set;
  bool lock_in_live_locks_acq_after_set;

  /* If lock_acquired_after_map was never created, which means the user code
     doesn't contain any acquired_after attributes, then simply return.
     This should be changed later if we decide to warn about unspecified
     locking order for two locks held simultaneously by a thread.  */
  if (!lock_acquired_after_map)
    return true;

  /* Since the lock_acquired_after_map is keyed by the decl tree of
     the lock variable (see handle_acquired_after_attribute() in c-common.c),
     we cannot use the full expression of the lock to look up the
     lock_acquired_after_map.  Instead, we need to get the lock decl
     component of the expression.  e.g. If the lock is a[2].foo->mu,
     we cannot use the whole expression tree.  We have to use the decl tree
     of mu.  */
  lock_decl = get_lockable_decl ((tree) lock);
  base = (lock_decl ? get_component_ref_base ((tree) lock) : NULL_TREE);
  entry = (lock_decl
           ? pointer_map_contains (lock_acquired_after_map, lock_decl)
           : NULL);
  /* Check if LIVE_LOCK is in LOCK's acquired_after set.  */
  live_lock_in_locks_acq_after_set = (entry
                                      && acquired_after_set_contains (
                                          (struct pointer_set_t *) *entry,
                                          live_lock_tree, base));

  live_lock_decl = get_lockable_decl (live_lock_tree);
  base = (live_lock_decl ? get_component_ref_base (live_lock_tree)
          : NULL_TREE);
  entry = (live_lock_decl
           ? pointer_map_contains (lock_acquired_after_map, live_lock_decl)
           : NULL);
  /* Check if LOCK is in LIVE_LOCK's acquired_after set.  */
  lock_in_live_locks_acq_after_set = (entry
                                      && acquired_after_set_contains (
                                          (struct pointer_set_t *) *entry,
                                          (tree) lock, base));

  if (!live_lock_in_locks_acq_after_set)
    {
      /* When LIVE_LOCK is not in LOCK's acquired_after set, we will emit
         warning messages only when LIVE_LOCK is annotated as being acquired
         after LOCK.  Basically what we are saying here is that if the two
         locks don't have an acquired_after relationship based on the
         annotations (attributes), we will not check for (and warn about)
         their locking order.  This is an escape hatch for locks that could
         be held simultaneously but whose acquisition order is not expressible
         using the current attribute/annotation scheme.  */
      if (lock_in_live_locks_acq_after_set)
        {
          void **loc_entry = pointer_map_contains (lock_locus_map, live_lock);
          if (loc_entry)
            warning_at (*current_loc, OPT_Wthread_safety,
                        G_("Lock %s is acquired after lock %s (acquired at"
                           " line %d) but is annotated otherwise"),
                        dump_expr_tree ((tree) lock, lname1),
                        dump_expr_tree (live_lock_tree, lname2),
                        LOCATION_LINE (*((location_t *) *loc_entry)));
          else
            warning_at (*current_loc, OPT_Wthread_safety,
                        G_("Lock %s is acquired after lock %s (held at function"
                           " entry) but is annotated otherwise"),
                        dump_expr_tree ((tree) lock, lname1),
                        dump_expr_tree (live_lock_tree, lname2));
        }
      return true;
    }

  if (lock_in_live_locks_acq_after_set)
    warning_at (*current_loc, OPT_Wthread_safety,
                G_("There is a cycle in the acquisition order between locks"
                   " %s and %s"),
                dump_expr_tree (live_lock_tree, lname1),
                dump_expr_tree ((tree) lock, lname2));

  return true;
}

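/* A hedged illustration of the mismatched-order warning above: mu2 is
   annotated as acquired after mu1, so taking them in the opposite order is
   expected to be reported (Mutex type and wrappers assumed):

     struct __attribute__ ((lockable)) Mutex { int unused; };
     struct Mutex mu1;
     struct Mutex mu2 __attribute__ ((acquired_after (mu1)));

     void lock_mu1 (void)   __attribute__ ((exclusive_lock (mu1)));
     void lock_mu2 (void)   __attribute__ ((exclusive_lock (mu2)));
     void unlock_mu1 (void) __attribute__ ((unlock (mu1)));
     void unlock_mu2 (void) __attribute__ ((unlock (mu2)));

     void
     wrong_order (void)
     {
       lock_mu2 ();
       lock_mu1 ();     // mu1 acquired while mu2 is held
       unlock_mu1 ();
       unlock_mu2 ();
     }

   Acquiring mu1 while mu2 is live triggers the "acquired after ... but is
   annotated otherwise" warning emitted by check_acquired_after().  */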
/* Main driver to check the lock acquisition order.  LOCK is the lock we are
   about to add to the live lock set.  LIVE_EXCL_LOCKS and LIVE_SHARED_LOCKS
   are the current live lock sets.  LOCUS is the source location at which LOCK
   is acquired.  */

static void
check_locking_order (tree lock,
                     const struct pointer_set_t *live_excl_locks,
                     const struct pointer_set_t *live_shared_locks,
                     const location_t *locus)
{
  if (!warn_thread_mismatched_lock_order)
    return;

  current_loc = locus;
  pointer_set_traverse (live_excl_locks, check_acquired_after, lock);
  pointer_set_traverse (live_shared_locks, check_acquired_after, lock);
}

1651 /* Given a CALL expr and an integer constant tree POS_ARG that specifies the
1652 argument position, returns the corresponding argument by iterating
1653 through the call's actual parameter list. */
1655 static tree
1656 get_actual_argument_from_position (gimple call, tree pos_arg)
1658 int lock_pos;
1659 int num_args = gimple_call_num_args (call);
1661 gcc_assert (TREE_CODE (pos_arg) == INTEGER_CST);
1663 lock_pos = TREE_INT_CST_LOW (pos_arg);
1665 /* The ipa-sra optimization can occasionally delete arguments, thus
1666 invalidating the index. */
1667 if (lock_pos < 1 || lock_pos > num_args)
1669 if (flag_ipa_sra)
1670 return NULL_TREE; /* Attempt to recover gracefully. */
1671 else
1672 gcc_unreachable ();
1675 /* The lock position specified in the attributes is 1-based, so we need to
1676 subtract 1 from it when accessing the call arguments. */
1677 return gimple_call_arg (call, lock_pos - 1);
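/* For example, the sketch below (illustrative names only) annotates free
   functions whose first argument is the lock they acquire or release; the
   attribute argument "1" is the 1-based position that this routine maps
   back to the actual argument of a call site:

     struct __attribute__ ((lockable)) spinlock { int held; };

     void spin_acquire (struct spinlock *l) __attribute__ ((exclusive_lock (1)));
     void spin_release (struct spinlock *l) __attribute__ ((unlock (1)));
*/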
1680 /* Given a call (CALL) and its function decl (FDECL), return the actual
1681 argument that corresponds to the given formal parameter (PARAM_DECL). */
1683 static tree
1684 get_actual_argument_from_parameter (gimple call, tree fdecl, tree param_decl)
1686 tree parm;
1687 int parm_pos;
1689 for (parm = DECL_ARGUMENTS (fdecl), parm_pos = 0;
1690 parm;
1691 parm = TREE_CHAIN (parm), ++parm_pos)
1692 if (DECL_NAME (parm) == DECL_NAME (param_decl))
1693 return gimple_call_arg (call, parm_pos);
1695 gcc_unreachable ();
1699 /* A helper function that adds the LOCKABLE, acquired by CALL, to the
1700 corresponding lock sets (LIVE_EXCL_LOCKS or LIVE_SHARED_LOCKS) depending
1701 on the boolean parameter IS_EXCLUSIVE_LOCK. If the CALL is a trylock call,
1702 create a trylock_info data structure which will be used later. */
1704 static void
1705 add_lock_to_lockset (gimple call, tree lockable,
1706 bool is_exclusive_lock, bool is_trylock,
1707 struct pointer_set_t *live_excl_locks,
1708 struct pointer_set_t *live_shared_locks)
1710 void **entry;
1712 if (!is_trylock)
1714 /* Insert the lock to either exclusive or shared live lock set. */
1715 if (is_exclusive_lock)
1716 pointer_set_insert(live_excl_locks, lockable);
1717 else
1718 pointer_set_insert(live_shared_locks, lockable);
1720 else
1722 /* If the primitive is a trylock, create a trylock_info structure and
1723 insert it to trylock_info_map, which will be used later when we
1724 analyze the if-statement whose condition is fed by the trylock. */
1725 struct trylock_info *tryinfo;
1726 entry = pointer_map_insert (trylock_info_map, call);
1727 if (!(*entry))
1729 tryinfo = XNEW (struct trylock_info);
1730 tryinfo->is_exclusive = is_exclusive_lock;
1731 tryinfo->locks = pointer_set_create();
1732 *entry = tryinfo;
1734 else
1736 tryinfo = (struct trylock_info *)*entry;
1737 gcc_assert (tryinfo->locks
1738 && tryinfo->is_exclusive == is_exclusive_lock);
1740 pointer_set_insert (tryinfo->locks, lockable);
1744 /* This function handles function calls that acquire or try to acquire
1745 locks (i.e. the functions annotated with exclusive_lock, shared_lock,
1746 exclusive_trylock, or shared_trylock attribute). Besides adding to the
1747 live lock sets the lock(s) it acquires (except for trylock calls), this
1748 function also does the following:
1750 - Checks the lock acquisition order between the lock it acquires and
1751 existing live locks.
1753 - Checks if any existing live lock is being acquired again
1754 (i.e. re-entered).
1756 - If the function call is a constructor of a scoped lock, adds an entry
1757 with the acquired lock to scopedlock_to_lock_map.
1759 - If the function call is a trylock, creates a trylock_info structure and
1760 inserts it to trylock_info_map.
1762 - Records the source location of this function call in lock_locus_map
1763 (as this is where the lock is acquired).
1765 This function handles one lock at a time, so if a locking primitive
1766 acquires multiple locks, this function is called multiple times (see
1767 process_function_attrs() below).
1769 Besides the call itself (CALL), we also pass in the function decl (FDECL).
1770 While the function decl of a call can be easily extracted by calling
1771 gimple_call_fndecl in most cases, it becomes a bit tricky when the function
1772 is virtual as gimple_call_fndecl will simply return NULL. We will need to
1773 get the function decl through the reference object in this case.
1774 Since we have already done all the things necessary to get the function
1775 decl earlier (see handle_call_gs()), instead of doing the whole dance again
1776 here, we might as well pass in the function decl that we extracted earlier.
1778 The lock to be acquired is either the base object (i.e. BASE_OBJ)
1779 when the primitive is a member function of a lockable class (e.g. "mu" in
1780 mu->Lock()), or specified by an attribute parameter and passed in as ARG.
1781 If ARG is an integer constant, it specifies the position of the primitive's
1782 argument that corresponds to the lock to be acquired. */
1784 static void
1785 handle_lock_primitive_attrs (gimple call, tree fdecl, tree arg, tree base_obj,
1786 bool is_exclusive_lock, bool is_trylock,
1787 struct pointer_set_t *live_excl_locks,
1788 struct pointer_set_t *live_shared_locks,
1789 const location_t *locus)
1791 char lname[LOCK_NAME_LEN];
1792 void **entry;
1793 tree lockable;
1794 tree lockable_type;
1796 /* If ARG is not NULL, it specifies the lock to acquire. Otherwise,
1797 BASE_OBJ is the lock. */
1798 if (!arg)
1799 arg = base_obj;
1800 else if (arg == error_mark_node)
1802 /* If the arg is the universal lock (represented as the error_mark_node),
1803 we don't need to do all the checks mentioned in the comments above.
1804 Just add it to the lock set and return. */
1805 add_lock_to_lockset (call, arg, is_exclusive_lock, is_trylock,
1806 live_excl_locks, live_shared_locks);
1807 return;
1809 /* When ARG is an integer that specifies the position of the
1810 call's argument corresponding to the lock, or if its leftmost base is
1811 a formal parameter, we need to grab the corresponding actual argument
1812 of the call. */
1813 else if (TREE_CODE (arg) == INTEGER_CST)
1815 arg = get_actual_argument_from_position (call, arg);
1816 /* If ipa-sra has rewritten the call, then recover as gracefully as
1817 possible. */
1818 if (!arg)
1820 /* Add the universal lock to the lockset to suppress subsequent
1821 errors. */
1822 if (is_exclusive_lock)
1823 pointer_set_insert (live_excl_locks, error_mark_node);
1824 else
1825 pointer_set_insert (live_shared_locks, error_mark_node);
1827 if (warn_thread_optimization)
1828 warning_at (*locus, OPT_Wthread_safety,
1829 G_("lock attribute has been removed by "
1830 "optimization"));
1832 return;
1835 else if (TREE_CODE (get_leftmost_base_var (arg)) == PARM_DECL)
1837 tree new_base
1838 = get_actual_argument_from_parameter (call, fdecl,
1839 get_leftmost_base_var (arg));
1840 arg = get_canonical_lock_expr (arg, NULL_TREE, false, new_base);
1842 else if (base_obj)
1843 arg = build_fully_qualified_lock (arg, base_obj);
1845 gcc_assert (arg);
1847 lockable = get_canonical_lock_expr (arg, NULL_TREE, false /* is_temp_expr */,
1848 NULL_TREE);
1850 /* If there were unbound locks when the thread safety attributes were parsed,
1851 we should try to bind them now if we see any lock declaration that
1852 matches the name of the unbound lock. */
1853 if (unbound_lock_map
1854 && (TREE_CODE (lockable) == VAR_DECL
1855 || TREE_CODE (lockable) == PARM_DECL
1856 || TREE_CODE (lockable) == FIELD_DECL))
1858 tree lock_id = DECL_NAME (lockable);
1859 void **entry = pointer_map_contains (unbound_lock_map, lock_id);
1860 if (entry)
1861 *entry = lockable;
1864 gcc_assert (fdecl);
1865 lockable_type = DECL_CONTEXT (fdecl);
1866 if (lockable_type && !TYPE_P (lockable_type))
1867 lockable_type = NULL_TREE;
1869 /* Check if the lock primitive is actually a constructor of a scoped lock.
1870 If so, insert to scopedlock_to_lock_map the scoped lock object along
1871 with the lock it acquires. */
1872 if (!is_trylock
1873 && lockable_type
1874 && lookup_attribute("scoped_lockable", TYPE_ATTRIBUTES (lockable_type)))
1876 if (TREE_CODE (base_obj) == ADDR_EXPR)
1878 tree scoped_lock = TREE_OPERAND (base_obj, 0);
1879 void **entry;
1880 if (TREE_CODE (scoped_lock) == SSA_NAME)
1881 scoped_lock = SSA_NAME_VAR (scoped_lock);
1882 gcc_assert(TREE_CODE (scoped_lock) == VAR_DECL);
1883 entry = pointer_map_insert (scopedlock_to_lock_map, scoped_lock);
1884 *entry = lockable;
1888 /* Check if the lock is already held. */
1889 if (pointer_set_contains(live_excl_locks, lockable)
1890 || pointer_set_contains(live_shared_locks, lockable))
1892 if (warn_thread_reentrant_lock)
1894 void **entry = pointer_map_contains (lock_locus_map, lockable);
1895 if (entry)
1896 warning_at (*locus, OPT_Wthread_safety,
1897 G_("Try to acquire lock %s that is already held"
1898 " (previously acquired at line %d)"),
1899 dump_expr_tree (lockable, lname),
1900 LOCATION_LINE (*((location_t *) *entry)));
1901 else
1902 warning_at (*locus, OPT_Wthread_safety,
1903 G_("Try to acquire lock %s that is already held"
1904 " (at function entry)"),
1905 dump_expr_tree (lockable, lname));
1907 /* Normally when we have detected a lock re-entrant issue here, we can
1908 simply return. However, if this primitive is a trylock, we still
1909 need to create an entry in the trylock_info_map (which will happen
1910 later) regardless. Otherwise, the assertion that every trylock call
1911 always has an entry in the map will fail later. */
1912 if (!is_trylock)
1913 return;
1916 /* Check the lock acquisition order. */
1917 check_locking_order (lockable, live_excl_locks, live_shared_locks, locus);
1919 /* Record the source location where the lock is acquired. */
1920 entry = pointer_map_insert (lock_locus_map, lockable);
1921 if (!(*entry))
1922 *entry = XNEW (location_t);
1923 *((location_t *) *entry) = *locus;
1925 add_lock_to_lockset (call, lockable, is_exclusive_lock, is_trylock,
1926 live_excl_locks, live_shared_locks);
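/* To illustrate the two ways a lock-acquiring primitive can name its lock
   (see the comments before handle_lock_primitive_attrs above), consider the
   sketch below; the class and member names are illustrative only:

     class __attribute__ ((lockable)) Mutex
     {
      public:
       void Lock () __attribute__ ((exclusive_lock));   // lock = base object,
                                                        // e.g. "mu" in mu->Lock()
       void Unlock () __attribute__ ((unlock));
     };

     class Table
     {
      public:
       void Reload () __attribute__ ((exclusive_lock (mu_)));  // lock named by
                                                               // the attribute arg
      private:
       Mutex mu_;
     };
*/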
1929 /* A helper function that removes the LOCKABLE from either LIVE_EXCL_LOCKS or
1930 LIVE_SHARED_LOCKS, and returns the canonical form of LOCKABLE. If LOCKABLE
1931 does not exist in either lock set, return NULL_TREE. */
1933 static tree
1934 remove_lock_from_lockset (tree lockable, struct pointer_set_t *live_excl_locks,
1935 struct pointer_set_t *live_shared_locks)
1937 tree lock_contained;
1939 /* Try to remove the actual lock. */
1940 if ((lock_contained = lock_set_contains(live_excl_locks, lockable,
1941 NULL_TREE, true)) != NULL_TREE)
1942 pointer_set_delete (live_excl_locks, lock_contained);
1943 else if ((lock_contained = lock_set_contains(live_shared_locks, lockable,
1944 NULL_TREE, true)) != NULL_TREE)
1945 pointer_set_delete (live_shared_locks, lock_contained);
1947 if (lock_contained)
1948 return lock_contained;
1950 /* If either of the lock sets contains the universal lock, then pretend that
1951 we've removed it, to avoid a warning about unlocking a lock that was
1952 not acquired. */
1953 if (pointer_set_contains (live_excl_locks, error_mark_node))
1954 return lockable;
1956 if (pointer_set_contains(live_shared_locks, error_mark_node))
1957 return lockable;
1959 return NULL_TREE;
1962 /* This function handles function calls that release locks (i.e. the
1963 functions annotated with the "unlock" attribute). Besides taking the
1964 lock out of the live lock set, it also checks whether the user code
1965 is trying to release a lock that's not currently held. For the
1966 explanations on parameters FDECL, ARG, and BASE_OBJ, please see the
1967 comments for handle_lock_primitive_attrs above. */
1969 static void
1970 handle_unlock_primitive_attr (gimple call, tree fdecl, tree arg, tree base_obj,
1971 struct bb_threadsafe_info *bbinfo,
1972 const location_t *locus)
1974 struct pointer_set_t *live_excl_locks = bbinfo->live_excl_locks;
1975 struct pointer_set_t *live_shared_locks = bbinfo->live_shared_locks;
1976 char lname[LOCK_NAME_LEN];
1977 tree lockable = NULL_TREE;
1978 tree lock_released;
1979 bool is_weak_unlock = false;
1981 /* Check if the unlock attribute specifies a lock or the position of the
1982 primitive's argument corresponding to the lock. */
1983 if (arg)
1985 /* When ARG is an integer that specifies the position of the
1986 call's argument corresponding to the lock, or if its leftmost base is
1987 a formal parameter, we need to grab the corresponding actual argument
1988 of the call. */
1989 if (TREE_CODE (arg) == INTEGER_CST)
1991 lockable = get_actual_argument_from_position (call, arg);
1992 /* If ipa-sra has rewritten the call, then fail as gracefully as
1993 possible -- just leave the lock in the lockset. */
1994 if (!lockable)
1996 if (warn_thread_optimization)
1997 warning_at (*locus, OPT_Wthread_safety,
1998 G_("unlock attribute has been removed by "
1999 "optimization"));
2000 return;
2003 else if (TREE_CODE (get_leftmost_base_var (arg)) == PARM_DECL)
2005 tree new_base = get_actual_argument_from_parameter (
2006 call, fdecl, get_leftmost_base_var (arg));
2007 lockable = get_canonical_lock_expr (arg, NULL_TREE, false, new_base);
2009 else if (base_obj)
2010 lockable = build_fully_qualified_lock (arg, base_obj);
2011 else
2012 lockable = arg;
2013 gcc_assert (lockable);
2014 lockable = get_canonical_lock_expr (lockable, NULL_TREE,
2015 false /* is_temp_expr */, NULL_TREE);
2017 else
2019 /* If ipa-sra has killed arguments, then base_obj may be NULL.
2020 Attempt to recover gracefully by leaving the lock in the lockset. */
2021 if (!base_obj)
2023 if (!flag_ipa_sra)
2024 gcc_unreachable ();
2025 else if (warn_thread_optimization)
2026 warning_at (*locus, OPT_Wthread_safety,
2027 G_("unlock attribute has been removed by "
2028 "optimization"));
2029 return;
2032 /* Check if the primitive is an unlock routine (e.g. the destructor or
2033 a release function) of a scoped_lock. If so, get the lock that is
2034 being released from scopedlock_to_lock_map. */
2035 if (TREE_CODE (base_obj) == ADDR_EXPR)
2037 tree scoped_lock = TREE_OPERAND (base_obj, 0);
2038 if (TREE_CODE (scoped_lock) == SSA_NAME)
2039 scoped_lock = SSA_NAME_VAR (scoped_lock);
2040 /* A scoped lock should be a local variable. */
2041 if (TREE_CODE (scoped_lock) == VAR_DECL)
2043 void **entry = pointer_map_contains (scopedlock_to_lock_map,
2044 scoped_lock);
2045 if (entry)
2047 gcc_assert (fdecl);
2048 lockable = (tree) *entry;
2049 /* Since this is a scoped lock, if the unlock routine is
2050 not the destructor, we assume it is a release function
2051 (e.g. std::unique_lock::release()), and therefore the
2052 lock is considered weakly released and should be added
2053 to the weak released lock set. */
2054 if (!lang_hooks.decl_is_destructor (fdecl))
2055 is_weak_unlock = true;
2059 /* If the function is not a destructor of a scoped_lock, base_obj
2060 is the lock. */
2061 if (!lockable)
2062 lockable = get_canonical_lock_expr (base_obj, NULL_TREE,
2063 false /* is_temp_expr */,
2064 NULL_TREE);
2067 /* Remove the lock from the live lock set and, if it is not currently held,
2068 warn about the issue. */
2069 if ((lock_released = remove_lock_from_lockset (lockable, live_excl_locks,
2070 live_shared_locks))
2071 != NULL_TREE)
2073 if (is_weak_unlock)
2075 gcc_assert (bbinfo->weak_released_locks);
2076 pointer_set_insert (bbinfo->weak_released_locks, lock_released);
2079 else if (!is_weak_unlock
2080 && ((lock_released =
2081 lock_set_contains (bbinfo->weak_released_locks, lockable,
2082 NULL_TREE, false)) != NULL_TREE))
2084 /* If the unlock function is not a weak release and the lock is currently
2085 in the weak release set, we need to remove it from the set as it is
2086 no longer considered weakly released after this point. */
2087 pointer_set_delete (bbinfo->weak_released_locks, lock_released);
2089 else if (warn_thread_mismatched_lock_acq_rel)
2090 warning_at (*locus, OPT_Wthread_safety,
2091 G_("Try to unlock %s that was not acquired"),
2092 dump_expr_tree (lockable, lname));
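/* As an illustration of the "weak release" handling above, consider a scoped
   lock class sketched as follows (names are illustrative; Mutex is assumed
   to be an annotated lockable class):

     class __attribute__ ((scoped_lockable)) MutexLock
     {
      public:
       explicit MutexLock (Mutex *mu) __attribute__ ((exclusive_lock (mu)));
       ~MutexLock () __attribute__ ((unlock));
       void Release () __attribute__ ((unlock));  // an unlock routine that is
                                                  // not the destructor
     };

     void f (Mutex *mu)
     {
       MutexLock l (mu);   // scopedlock_to_lock_map: l -> *mu
       l.Release ();       // *mu is moved to the weak released lock set
     }                     // the destructor's unlock finds *mu in that set,
                           // so no mismatched-release warning is emitted
*/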
2095 /* A helper function for handling the function "locks_excluded" attribute.
2096 Check if LOCK is in the current live lock sets and emit warnings if so.
2098 LOCK: the lock being examined.
2099 FDECL: function decl of the call.
2100 BASE_OBJ: base object if FDECL is a method (member function).
2101 LIVE_EXCL_LOCKS: current live exclusive lock set.
2102 LIVE_SHARED_LOCKS: current live shared lock set.
2103 LOCUS: location info of the call. */
2105 static void
2106 check_func_lock_excluded (tree lock, tree fdecl, tree base_obj,
2107 const struct pointer_set_t *live_excl_locks,
2108 const struct pointer_set_t *live_shared_locks,
2109 const location_t *locus)
2111 tree lock_contained;
2113 /* LOCK could be NULL if the front-end/parser cannot recognize it.
2114 Simply ignore it and return. */
2115 if (!lock)
2116 return;
2118 /* When the base obj tree is not an ADDR_EXPR, which means it is a
2119 pointer (i.e. base->foo() or foo(base)), we will need to create
2120 a new base that is INDIRECT_REF so that we would be able to form
2121 a correct full expression for a lock later. On the other hand,
2122 if the base obj is an ADDR_EXPR (i.e. base.foo() or foo(&base)),
2123 we need to remove the address-taken operation. */
2124 if (base_obj)
2126 tree canon_base = get_canonical_lock_expr (base_obj, NULL_TREE,
2127 true /* is_temp_expr */,
2128 NULL_TREE);
2129 if (TREE_CODE (canon_base) != ADDR_EXPR)
2131 if (POINTER_TYPE_P (TREE_TYPE (canon_base)))
2132 base_obj = build1 (INDIRECT_REF,
2133 TREE_TYPE (TREE_TYPE (canon_base)),
2134 canon_base);
2135 /* If the base object is neither an ADDR_EXPR, nor a pointer,
2136 and DECL is a cloned method, most likely we have done IPA-SRA
2137 optimization, where reference parameters are changed to be
2138 passed by value. So in this case, just use the CANON_BASE. */
2139 else if (DECL_ABSTRACT_ORIGIN (fdecl))
2140 base_obj = canon_base;
2141 else
2142 gcc_unreachable ();
2144 else
2145 base_obj = TREE_OPERAND (canon_base, 0);
2148 if (!DECL_P (lock))
2149 lock = get_canonical_lock_expr (lock, NULL_TREE, false /* is_temp_expr */,
2150 NULL_TREE);
2152 /* Check if the excluded lock is in the live lock sets when the
2153 function is called. If so, issue a warning. */
2154 if ((lock_contained = lock_set_contains (live_excl_locks, lock,
2155 base_obj, true))
2156 || (lock_contained = lock_set_contains (live_shared_locks, lock,
2157 base_obj, true)))
2159 char lname[LOCK_NAME_LEN];
2160 void **entry = pointer_map_contains (lock_locus_map, lock_contained);
2161 if (entry)
2162 warning_at (*locus, OPT_Wthread_safety,
2163 G_("Cannot call function %qE with lock %s held"
2164 " (previously acquired at line %d)"),
2165 DECL_NAME (fdecl),
2166 dump_expr_tree (lock_contained, lname),
2167 LOCATION_LINE (*((location_t *) *entry)));
2168 else
2169 warning_at (*locus, OPT_Wthread_safety,
2170 G_("Cannot call function %qE with lock %s held"
2171 " (at function entry)"),
2172 DECL_NAME (fdecl),
2173 dump_expr_tree (lock_contained, lname));
2177 /* Function lock requirement type. */
2179 enum FUNC_LOCK_REQ_TYPE {
2180 FLR_EXCL_LOCK_REQUIRED,
2181 FLR_SHARED_LOCK_REQUIRED,
2182 FLR_LOCK_EXCLUDED
2185 /* Handle function lock requirement attributes ("exclusive_locks_required",
2186 "shared_locks_required", and "locks_excluded").
2188 CALL: function/method call that's currently being examined.
2189 FDECL: function/method decl of the call.
2190 BASE_OBJ: base object if FDECL is a method (member function).
2191 ATTR: the lock requirement attribute attached to FDECL.
2192 REQ_TYPE: function lock requirement type (see FUNC_LOCK_REQ_TYPE above).
2193 LIVE_EXCL_LOCKS: current live exclusive lock set.
2194 LIVE_SHARED_LOCKS: current live shared lock set.
2195 LOCUS: location info of the call. */
2197 static void
2198 handle_function_lock_requirement (gimple call, tree fdecl, tree base_obj,
2199 tree attr, enum FUNC_LOCK_REQ_TYPE req_type,
2200 const struct pointer_set_t *live_excl_locks,
2201 const struct pointer_set_t *live_shared_locks,
2202 const location_t *locus)
2204 tree arg;
2205 tree lock;
2207 for (arg = TREE_VALUE (attr); arg; arg = TREE_CHAIN (arg))
2209 tree tmp_base_obj = base_obj;
2210 lock = TREE_VALUE (arg);
2211 gcc_assert (lock);
2212 /* If lock is the error_mark_node, just set it to NULL_TREE so that
2213 we will reduce the level of checking later. (i.e. Only check whether
2214 there is any live lock at this point in check_lock_required and
2215 ignore the lock in check_func_lock_excluded.) */
2216 if (lock == error_mark_node)
2217 lock = NULL_TREE;
2218 else if (TREE_CODE (lock) == INTEGER_CST)
2220 lock = get_actual_argument_from_position (call, lock);
2221 /* Ignore attribute if ipa-sra has killed the argument. */
2222 if (!lock)
2223 return;
2224 /* If the lock is a function argument, we don't want to
2225 prepend the base object to the lock name. Set the
2226 TMP_BASE_OBJ to NULL. */
2227 tmp_base_obj = NULL_TREE;
2229 /* If LOCK's leftmost base is a formal parameter, we need to grab the
2230 corresponding actual argument of the call and replace the formal
2231 parameter with the actual argument in LOCK. */
2232 else if (TREE_CODE (get_leftmost_base_var (lock)) == PARM_DECL)
2234 tree new_base = get_actual_argument_from_parameter (
2235 call, fdecl, get_leftmost_base_var (lock));
2236 lock = get_canonical_lock_expr (lock, NULL_TREE, false, new_base);
2237 tmp_base_obj = NULL_TREE;
2240 if (req_type == FLR_EXCL_LOCK_REQUIRED)
2241 check_lock_required (lock, fdecl, tmp_base_obj,
2242 false /* is_indirect_ref */,
2243 live_excl_locks, live_shared_locks,
2244 locus, TSA_WRITE);
2245 else if (req_type == FLR_SHARED_LOCK_REQUIRED)
2246 check_lock_required (lock, fdecl, tmp_base_obj,
2247 false /* is_indirect_ref */,
2248 live_excl_locks, live_shared_locks,
2249 locus, TSA_READ);
2250 else
2252 gcc_assert (req_type == FLR_LOCK_EXCLUDED);
2253 check_func_lock_excluded (lock, fdecl, tmp_base_obj,
2254 live_excl_locks, live_shared_locks, locus);
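/* For example, with the illustrative annotations below (assuming an
   annotated lockable class Mutex), the checks above verify that mu is held
   when debit() is called and that it is not held when refresh() is called:

     Mutex mu;
     int balance __attribute__ ((guarded_by (mu)));

     void debit (int amount) __attribute__ ((exclusive_locks_required (mu)));
     void refresh () __attribute__ ((locks_excluded (mu)));

     void f ()
     {
       mu.Lock ();
       debit (10);    // OK: mu is held as required
       refresh ();    // warning: cannot call refresh() with lock mu held
       mu.Unlock ();
     }
*/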
2259 /* The main routine that handles the thread safety attributes for
2260 functions. CALL is the call expression being analyzed. FDECL is its
2261 corresponding function decl tree. LIVE_EXCL_LOCKS and LIVE_SHARED_LOCKS
2262 are the live lock sets when the control flow reaches this call expression.
2263 LOCUS is the source location of the call expression. */
2265 static void
2266 process_function_attrs (gimple call, tree fdecl,
2267 struct bb_threadsafe_info *current_bb_info,
2268 const location_t *locus)
2270 struct pointer_set_t *live_excl_locks = current_bb_info->live_excl_locks;
2271 struct pointer_set_t *live_shared_locks = current_bb_info->live_shared_locks;
2272 tree attr = NULL_TREE;
2273 tree base_obj = NULL_TREE;
2274 bool is_exclusive_lock;
2275 bool is_trylock;
2276 bool optimized_args = false;
2278 gcc_assert (is_gimple_call (call));
2280 /* First check if the function call is annotated with any escape-hatch
2281 related attributes and set/reset the corresponding flags if so. */
2282 if (lookup_attribute("ignore_reads_begin", DECL_ATTRIBUTES (fdecl))
2283 != NULL_TREE)
2284 current_bb_info->reads_ignored = true;
2285 if (lookup_attribute("ignore_reads_end", DECL_ATTRIBUTES (fdecl))
2286 != NULL_TREE)
2287 current_bb_info->reads_ignored = false;
2288 if (lookup_attribute("ignore_writes_begin", DECL_ATTRIBUTES (fdecl))
2289 != NULL_TREE)
2290 current_bb_info->writes_ignored = true;
2291 if (lookup_attribute("ignore_writes_end", DECL_ATTRIBUTES (fdecl))
2292 != NULL_TREE)
2293 current_bb_info->writes_ignored = false;
2295 /* If the given function is a clone, and if some of the parameters of the
2296 clone have been optimized away, then the function attribute is no longer
2297 correct, and we should suppress certain warnings. Clones are often created
2298 when -fipa-sra is enabled, which happens by default at -O2 starting from
2299 GCC-4.5. The clones could be created as early as when constructing SSA.
2300 ipa-sra is particularly fond of optimizing away the "this" pointer,
2301 which is a problem because that makes it impossible to determine the
2302 base object, which then causes spurious errors. It's better to just
2303 remain silent in this case. */
2304 if ((TREE_CODE (TREE_TYPE (fdecl)) == FUNCTION_TYPE
2305 || TREE_CODE (TREE_TYPE (fdecl)) == METHOD_TYPE) /* sanity check */
2306 && (fdecl != DECL_ORIGIN (fdecl)) /* is it a clone? */
2307 && (type_num_arguments (TREE_TYPE (fdecl)) != /* compare args */
2308 type_num_arguments (TREE_TYPE (DECL_ORIGIN(fdecl)))))
2309 optimized_args = true;
2311 /* If the function is a class member, the first argument of the function
2312 (i.e. "this" pointer) would be the base object. Note that here we call
2313 DECL_ORIGIN on fdecl first before we check whether it's a METHOD_TYPE
2314 because if fdecl is a cloned method, the TREE_CODE of its type would be
2315 FUNCTION_TYPE instead of METHOD_TYPE, which would cause us not to grab
2316 its base object. */
2317 if (TREE_CODE (TREE_TYPE (DECL_ORIGIN (fdecl))) == METHOD_TYPE
2318 && gimple_call_num_args(call) > 0)
2319 base_obj = gimple_call_arg (call, 0);
2321 /* Check whether this is a locking primitive of any kind. */
2322 if ((attr = lookup_attribute("exclusive_lock",
2323 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2325 is_exclusive_lock = true;
2326 is_trylock = false;
2328 else if ((attr = lookup_attribute("exclusive_trylock",
2329 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2331 is_exclusive_lock = true;
2332 is_trylock = true;
2334 else if ((attr = lookup_attribute("shared_lock",
2335 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2337 is_exclusive_lock = false;
2338 is_trylock = false;
2340 else if ((attr = lookup_attribute("shared_trylock",
2341 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2343 is_exclusive_lock = false;
2344 is_trylock = true;
2347 /* Handle locking primitives */
2348 if (attr)
2350 if (TREE_VALUE (attr))
2352 int succ_retval = 0;
2353 tree arg = TREE_VALUE (attr);
2354 /* If the locking primitive is a trylock, the first argument of
2355 the attribute specifies the return value of a successful lock
2356 acquisition. */
2357 if (is_trylock)
2359 gcc_assert (TREE_CODE (TREE_VALUE (arg)) == INTEGER_CST);
2360 succ_retval = TREE_INT_CST_LOW (TREE_VALUE (arg));
2361 arg = TREE_CHAIN (arg);
2364 /* If the primitive is a trylock, after we consume the first
2365 argument of the attribute, there might not be any argument
2366 left. So we need to check if arg is NULL again here. */
2367 if (arg)
2369 /* If the locking primitive attribute specifies multiple locks
2370 in its arguments, we iterate through the argument list and
2371 handle each of the locks individually. */
2372 for (; arg; arg = TREE_CHAIN (arg))
2373 handle_lock_primitive_attrs (call, fdecl, TREE_VALUE (arg),
2374 base_obj, is_exclusive_lock,
2375 is_trylock, live_excl_locks,
2376 live_shared_locks, locus);
2378 else
2380 /* If the attribute does not have any argument left, the lock to
2381 be acquired is the base obj (e.g. "mu" in mu->TryLock()). */
2382 handle_lock_primitive_attrs (call, fdecl, NULL_TREE, base_obj,
2383 is_exclusive_lock, is_trylock,
2384 live_excl_locks, live_shared_locks,
2385 locus);
2387 /* If the primitive is a trylock, fill in the return value on
2388 successful lock acquisition in the trylock_info that was
2389 created in handle_lock_primitive_attrs. */
2390 if (is_trylock)
2392 struct trylock_info *tryinfo;
2393 void **entry = pointer_map_contains (trylock_info_map, call);
2394 gcc_assert (entry);
2395 tryinfo = (struct trylock_info *)*entry;
2396 tryinfo->succ_retval = succ_retval;
2399 else
2401 /* If the attribute does not have any argument, the lock to be
2402 acquired is the base obj (e.g. "mu" in mu->Lock()). */
2403 gcc_assert (!is_trylock);
2404 handle_lock_primitive_attrs (call, fdecl, NULL_TREE, base_obj,
2405 is_exclusive_lock, is_trylock,
2406 live_excl_locks, live_shared_locks,
2407 locus);
2410 /* Handle unlocking primitive */
2411 else if ((attr = lookup_attribute ("unlock", DECL_ATTRIBUTES (fdecl)))
2412 != NULL_TREE)
2414 if (TREE_VALUE (attr))
2416 /* If the unlocking primitive attribute specifies multiple locks
2417 in its arguments, we iterate through the argument list and
2418 handle each of the locks individually. */
2419 tree arg;
2420 for (arg = TREE_VALUE (attr); arg; arg = TREE_CHAIN (arg))
2422 /* If the unlock arg is an error_mark_node, which means an
2423 unsupported lock name/expression was encountered during
2424 parsing, the conservative approach to take is not to check
2425 the lock acquire/release mismatch issue in the current
2426 function by setting the flag to 0. Note that the flag will
2427 be restored to its original value after finishing analyzing
2428 the current function. */
2429 if (TREE_VALUE (arg) == error_mark_node)
2431 warn_thread_mismatched_lock_acq_rel = 0;
2432 continue;
2434 handle_unlock_primitive_attr (call, fdecl, TREE_VALUE (arg),
2435 base_obj, current_bb_info, locus);
2438 else
2439 /* If the attribute does not have any argument, the lock to be
2440 released is the base obj (e.g. "mu" in mu->Unlock()). */
2441 handle_unlock_primitive_attr (call, fdecl, NULL_TREE, base_obj,
2442 current_bb_info, locus);
2445 /* Suppress warnings if the function arguments are no longer accurate. */
2446 if (warn_thread_unguarded_func && !optimized_args)
2448 /* Handle the attributes specifying the lock requirements of
2449 functions. */
2450 if ((attr = lookup_attribute ("exclusive_locks_required",
2451 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2452 handle_function_lock_requirement (call, fdecl, base_obj, attr,
2453 FLR_EXCL_LOCK_REQUIRED,
2454 live_excl_locks, live_shared_locks,
2455 locus);
2457 if ((attr = lookup_attribute ("shared_locks_required",
2458 DECL_ATTRIBUTES (fdecl))) != NULL_TREE)
2459 handle_function_lock_requirement (call, fdecl, base_obj, attr,
2460 FLR_SHARED_LOCK_REQUIRED,
2461 live_excl_locks, live_shared_locks,
2462 locus);
2464 if ((attr = lookup_attribute ("locks_excluded", DECL_ATTRIBUTES (fdecl)))
2465 != NULL_TREE)
2466 handle_function_lock_requirement (call, fdecl, base_obj, attr,
2467 FLR_LOCK_EXCLUDED,
2468 live_excl_locks, live_shared_locks,
2469 locus);
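/* The escape-hatch attributes handled at the top of this function can be
   used, for instance, as in the sketch below (illustrative names only;
   Mutex is assumed to be an annotated lockable class) to suppress checking
   of intentionally unprotected reads:

     void IgnoreReadsBegin () __attribute__ ((ignore_reads_begin));
     void IgnoreReadsEnd () __attribute__ ((ignore_reads_end));

     Mutex mu;
     int stats __attribute__ ((guarded_by (mu)));

     int ReadStatsRacily ()
     {
       IgnoreReadsBegin ();
       int v = stats;     // reads are not checked between the begin/end pair
       IgnoreReadsEnd ();
       return v;
     }
*/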
2473 /* The main routine that handles the attributes specifying variables' lock
2474 requirements. */
2476 static void
2477 process_guarded_by_attrs (tree vdecl, tree base_obj, bool is_indirect_ref,
2478 const struct pointer_set_t *excl_locks,
2479 const struct pointer_set_t *shared_locks,
2480 const location_t *locus, enum access_mode mode)
2482 tree attr;
2483 tree lockable = NULL_TREE;
2484 /* A flag indicating whether the attribute is {point_to_}guarded_by with
2485 a lock specified or simply {point_to_}guarded. */
2486 bool lock_specified = true;
2488 if (!warn_thread_unguarded_var)
2489 return;
2491 if (is_indirect_ref)
2493 attr = lookup_attribute ("point_to_guarded_by", DECL_ATTRIBUTES (vdecl));
2494 if (!attr)
2496 attr = lookup_attribute ("point_to_guarded",
2497 DECL_ATTRIBUTES (vdecl));
2498 lock_specified = false;
2501 else
2503 attr = lookup_attribute ("guarded_by", DECL_ATTRIBUTES (vdecl));
2504 if (!attr)
2506 attr = lookup_attribute ("guarded", DECL_ATTRIBUTES (vdecl));
2507 lock_specified = false;
2511 /* If the variable does not have an attribute specifying that it should
2512 be protected by a lock, simply return. */
2513 if (!attr)
2514 return;
2516 /* If the variable is a compiler-created temporary pointer, grab the
2517 original variable's decl from the debug expr (as we don't want to
2518 print out the temp name in the warnings). For reasons why we only
2519 need to do this for pointers, see lookup_tmp_var() in gimplify.c. */
2520 if (is_indirect_ref && DECL_ARTIFICIAL (vdecl))
2522 gcc_assert (DECL_DEBUG_EXPR_IS_FROM (vdecl) && DECL_DEBUG_EXPR (vdecl));
2523 vdecl = DECL_DEBUG_EXPR (vdecl);
2524 gcc_assert (DECL_P (vdecl));
2527 if (lock_specified)
2529 gcc_assert (TREE_VALUE (attr));
2530 lockable = TREE_VALUE (TREE_VALUE (attr));
2531 gcc_assert (lockable);
2534 check_lock_required (lockable, vdecl, base_obj, is_indirect_ref,
2535 excl_locks, shared_locks, locus, mode);
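/* For example, with the illustrative declarations below (assuming an
   annotated lockable class Mutex), the check above requires mu to be held
   when the guarded variable, or the memory its guarded pointer points to,
   is accessed:

     Mutex mu;
     int counter __attribute__ ((guarded_by (mu)));
     int *slot __attribute__ ((point_to_guarded_by (mu)));

     void f ()
     {
       counter++;     // warning: counter is accessed without holding mu
       mu.Lock ();
       counter++;     // OK
       *slot = 0;     // OK: the pointed-to location is guarded by mu
       mu.Unlock ();
     }
*/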
2538 /* This routine is called when we see an indirect reference in our
2539 analysis of expressions. In an indirect reference, the pointer itself
2540 is accessed as a read. The parameter MODE indicates how the memory
2541 location pointed to by the PTR is being accessed (either read or write). */
2543 static void
2544 handle_indirect_ref (tree ptr, struct pointer_set_t *excl_locks,
2545 struct pointer_set_t *shared_locks,
2546 const location_t *locus, enum access_mode mode)
2548 tree vdecl;
2550 /* The pointer itself is accessed as a read */
2551 analyze_expr (ptr, NULL_TREE, false /* is_indirect_ref */, excl_locks,
2552 shared_locks, locus, TSA_READ);
2554 if (TREE_CODE (ptr) == SSA_NAME)
2556 vdecl = SSA_NAME_VAR (ptr);
2557 if (!DECL_NAME (vdecl))
2559 gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
2560 if (is_gimple_assign (def_stmt)
2561 && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
2562 == GIMPLE_SINGLE_RHS))
2563 vdecl = gimple_assign_rhs1 (def_stmt);
2566 else
2567 vdecl = ptr;
2569 if (DECL_P (vdecl))
2570 process_guarded_by_attrs (vdecl, NULL_TREE, true /* is_indirect_ref */,
2571 excl_locks, shared_locks, locus, mode);
2572 else
2573 analyze_expr (vdecl, NULL_TREE, true /* is_indirect_ref */,
2574 excl_locks, shared_locks, locus, mode);
2576 return;
2580 /* The main routine that handles gimple call statements. This will update
2581 the set of held locks.
2582 CALL -- the gimple call statement.
2583 CURRENT_BB_INFO -- a pointer to the lockset structures for the current
2584 basic block. */
2586 static void
2587 handle_call_gs (gimple call, struct bb_threadsafe_info *current_bb_info)
2589 tree fdecl = get_fdecl_from_gimple_stmt (call);
2590 int num_args = gimple_call_num_args (call);
2591 int arg_index = 0;
2592 tree arg_type = NULL_TREE;
2593 tree arg;
2594 tree lhs;
2595 location_t locus;
2597 if (!gimple_has_location (call))
2598 locus = input_location;
2599 else
2600 locus = gimple_location (call);
2602 /* The callee fndecl could be NULL, e.g., when the function is passed in
2603 as an argument. */
2604 if (fdecl)
2606 arg_type = TYPE_ARG_TYPES (TREE_TYPE (fdecl));
2607 if (TREE_CODE (TREE_TYPE (fdecl)) == METHOD_TYPE)
2609 /* If an object, x, is guarded by a lock, whether or not
2610 calling x.foo() requires an exclusive lock depends on
2611 whether foo() is const.
2612 enum access_mode rw_mode =
2613 lang_hooks.decl_is_const_member_func (fdecl) ? TSA_READ
2614 : TSA_WRITE;
2616 /* A method should have at least one argument, i.e. the "this" pointer. */
2617 gcc_assert (num_args);
2618 arg = gimple_call_arg (call, 0);
2620 /* If the base object (i.e. "this" object) is a SSA name of a temp
2621 variable, as shown in the following example (assuming the source
2622 code is 'base.method_call()'):
2624 D.5041_2 = &this_1(D)->base;
2625 result.0_3 = method_call (D.5041_2);
2627 we will need to get the rhs of the SSA def of the temp variable
2628 in order for the analysis to work correctly. */
2629 if (TREE_CODE (arg) == SSA_NAME)
2631 tree vdecl = SSA_NAME_VAR (arg);
2632 if (DECL_ARTIFICIAL (vdecl)
2633 && !gimple_nop_p (SSA_NAME_DEF_STMT (arg)))
2635 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2636 if (is_gimple_assign (def_stmt)
2637 && (get_gimple_rhs_class (gimple_assign_rhs_code (
2638 def_stmt)) == GIMPLE_SINGLE_RHS))
2639 arg = gimple_assign_rhs1 (def_stmt);
2643 /* Analyze the base object ("this") if we are not instructed to
2644 ignore it. */
2645 if (!(current_bb_info->reads_ignored && rw_mode == TSA_READ)
2646 && !(current_bb_info->writes_ignored && rw_mode == TSA_WRITE))
2648 if (TREE_CODE (arg) == ADDR_EXPR)
2650 /* Handle smart/scoped pointers. They are not actual
2651 pointers but they can be annotated with
2652 "point_to_guarded_by" attribute and have overloaded "->"
2653 and "*" operators, so we treat them as normal pointers. */
2654 if ((DECL_NAME (fdecl) == maybe_get_identifier ("operator->"))
2655 || (DECL_NAME (fdecl)
2656 == maybe_get_identifier ("operator*")))
2657 handle_indirect_ref(TREE_OPERAND (arg, 0),
2658 current_bb_info->live_excl_locks,
2659 current_bb_info->live_shared_locks,
2660 &locus, rw_mode);
2661 else
2662 /* Handle the case of x.foo() or foo(&x) */
2663 analyze_expr (TREE_OPERAND (arg, 0), NULL_TREE,
2664 false /* is_indirect_ref */,
2665 current_bb_info->live_excl_locks,
2666 current_bb_info->live_shared_locks, &locus,
2667 rw_mode);
2669 else
2671 /* Handle the case of x->foo() or foo(x) */
2672 gcc_assert (POINTER_TYPE_P (TREE_TYPE (arg)));
2673 handle_indirect_ref(arg, current_bb_info->live_excl_locks,
2674 current_bb_info->live_shared_locks,
2675 &locus, rw_mode);
2679 /* Advance to the next argument */
2680 ++arg_index;
2681 arg_type = TREE_CHAIN (arg_type);
2685 /* Analyze the call's arguments if we are not instructed to ignore the
2686 reads. */
2687 if (!current_bb_info->reads_ignored
2688 && (!fdecl
2689 || !lookup_attribute("unprotected_read", DECL_ATTRIBUTES (fdecl))))
2691 for ( ; arg_index < num_args; ++arg_index)
2693 arg = gimple_call_arg (call, arg_index);
2694 if (!CONSTANT_CLASS_P (arg))
2696 /* In analyze_expr routine, an address-taken expr (e.g. &x) will
2697 be skipped because the variable itself is not actually
2698 accessed. However, if an argument which is an address-taken
2699 expr, say &x, is passed into a function for a reference
2700 parameter, we want to treat it as a use of x because this is
2701 what users expect and most likely x will be used in the
2702 callee body anyway. */
2703 if (arg_type
2704 && TREE_CODE (TREE_VALUE (arg_type)) == REFERENCE_TYPE)
2706 tree canon_arg = arg;
2707 /* The argument could be an SSA_NAME. Try to get the rhs of
2708 its SSA_DEF. */
2709 if (TREE_CODE (arg) == SSA_NAME)
2711 tree vdecl = SSA_NAME_VAR (arg);
2712 if (DECL_ARTIFICIAL (vdecl)
2713 && !gimple_nop_p (SSA_NAME_DEF_STMT (arg)))
2715 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2716 if (is_gimple_assign (def_stmt)
2717 && (get_gimple_rhs_class (
2718 gimple_assign_rhs_code (def_stmt))
2719 == GIMPLE_SINGLE_RHS))
2720 canon_arg = gimple_assign_rhs1 (def_stmt);
2723 /* For an argument which is an ADDR_EXPR, say &x, that
2724 corresponds to a reference parameter, remove the
2725 address-taken operator and only pass 'x' to
2726 analyze_expr. */
2727 if (TREE_CODE (canon_arg) == ADDR_EXPR)
2728 arg = TREE_OPERAND (canon_arg, 0);
2731 analyze_expr (arg, NULL_TREE, false /* is_indirect_ref */,
2732 current_bb_info->live_excl_locks,
2733 current_bb_info->live_shared_locks, &locus,
2734 TSA_READ);
2737 if (arg_type)
2738 arg_type = TREE_CHAIN (arg_type);
2742 /* Analyze the call's lhs if it exists and we are not instructed to ignore
2743 the writes. */
2744 lhs = gimple_call_lhs (call);
2745 if (lhs != NULL_TREE && !current_bb_info->writes_ignored)
2746 analyze_expr (lhs, NULL_TREE, false /* is_indirect_ref */,
2747 current_bb_info->live_excl_locks,
2748 current_bb_info->live_shared_locks, &locus, TSA_WRITE);
2750 /* Process the attributes associated with the callee func decl.
2751 Note that we want to process the arguments first so that the callee
2752 func decl attributes have no effect on the arguments. */
2753 if (fdecl)
2754 process_function_attrs (call, fdecl, current_bb_info, &locus);
2756 return;
2759 /* The main routine that handles decls and expressions. It in turn calls
2760 other helper routines to analyze different kinds of trees.
2762 EXPR is the expression/decl being analyzed.
2763 BASE_OBJ is the base component of EXPR if EXPR is a component reference
2764 (e.g. b.a). IS_INDIRECT_REF indicates whether the access dereferences EXPR.
2765 EXCL_LOCKS and SHARED_LOCKS are the live lock sets when the control flow
2766 reaches EXPR.
2767 LOCUS is the source location of EXPR.
2768 MODE indicates whether the access is a read or a write. */
2770 static void
2771 analyze_expr (tree expr, tree base_obj, bool is_indirect_ref,
2772 struct pointer_set_t *excl_locks,
2773 struct pointer_set_t *shared_locks,
2774 const location_t *locus, enum access_mode mode)
2776 tree vdecl;
2778 if (EXPR_P (expr))
2780 int nops;
2781 int i;
2782 /* For an address-taken expression (i.e. &x), the memory location of
2783 the operand is not actually accessed. So no thread safe check
2784 necessary here. */
2785 if (TREE_CODE (expr) == ADDR_EXPR)
2786 return;
2788 if (TREE_CODE (expr) == INDIRECT_REF || TREE_CODE (expr) == MEM_REF)
2790 tree ptr = TREE_OPERAND (expr, 0);
2791 handle_indirect_ref (ptr, excl_locks, shared_locks, locus, mode);
2792 return;
2795 /* For a component reference, we need to look at both base and
2796 component trees. */
2797 if (TREE_CODE (expr) == COMPONENT_REF)
2799 tree base = TREE_OPERAND (expr, 0);
2800 tree component = TREE_OPERAND (expr, 1);
2801 analyze_expr (base, NULL_TREE, false /* is_indirect_ref */,
2802 excl_locks, shared_locks, locus, mode);
2803 analyze_expr (component, base, is_indirect_ref,
2804 excl_locks, shared_locks, locus, mode);
2805 return;
2808 /* For all other expressions, just iterate through their operands
2809 and call analyze_expr on them recursively */
2810 nops = TREE_OPERAND_LENGTH (expr);
2811 for (i = 0; i < nops; i++)
2813 tree op = TREE_OPERAND (expr, i);
2814 if (op != 0 && !CONSTANT_CLASS_P (op))
2815 analyze_expr (op, base_obj, false /* is_indirect_ref */,
2816 excl_locks, shared_locks, locus, mode);
2819 return;
2822 /* If EXPR is a ssa name, grab its original variable decl. */
2823 if (TREE_CODE (expr) == SSA_NAME)
2825 vdecl = SSA_NAME_VAR (expr);
2826 /* If VDECL is a nameless temp variable and we are analyzing an indirect
2827 reference, we will need to grab and analyze the RHS of its SSA def
2828 because the RHS is the actual pointer that gets dereferenced.
2829 For example, in the following snippet of gimple IR, when we first
2830 analyzed S1, we only saw a direct access to foo.a_. Later, when
2831 analyzing the RHS of S2 (i.e. *D1803_1), which is an indirect
2832 reference, we need to look at foo.a_ again because what's really
2833 being referenced is *foo.a_.
2835 S1: D.1803_1 = foo.a_;
2836 S2: res.1_4 = *D.1803_1; */
2837 if (!DECL_NAME (vdecl) && is_indirect_ref)
2839 gimple def_stmt = SSA_NAME_DEF_STMT (expr);
2840 if (is_gimple_assign (def_stmt)
2841 && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
2842 == GIMPLE_SINGLE_RHS))
2843 vdecl = gimple_assign_rhs1 (def_stmt);
2846 else if (DECL_P (expr))
2847 vdecl = expr;
2848 else
2849 return;
2851 if (DECL_P (vdecl))
2852 process_guarded_by_attrs (vdecl, base_obj, is_indirect_ref,
2853 excl_locks, shared_locks, locus, mode);
2854 else
2855 analyze_expr (vdecl, base_obj, is_indirect_ref, excl_locks, shared_locks,
2856 locus, mode);
2859 /* This is a helper function called by handle_cond_gs() to check if
2860 GS is a trylock call (or a simple expression fed by a trylock
2861 call that involves only logical "not" operations). If so, grab and
2862 return the corresponding trylock_info structure. Otherwise, return NULL.
2863 In the process, *LOCK_ON_TRUE_PATH is set to indicate whether the true
2864 (control flow) path should be taken when the lock is successfully
2865 acquired. */
2867 static struct trylock_info *
2868 get_trylock_info(gimple gs, bool *lock_on_true_path)
2870 while (1)
2872 if (is_gimple_assign (gs))
2874 enum tree_code subcode = gimple_assign_rhs_code (gs);
2875 if (subcode == SSA_NAME)
2877 gs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (gs));
2878 continue;
2880 else if (subcode == TRUTH_NOT_EXPR)
2882 /* If the expr is a logical "not" operation, negate the value
2883 pointed to by lock_on_true_path and continue tracing back
2884 the expr's operand. */
2885 *lock_on_true_path = !*lock_on_true_path;
2886 gs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (gs));
2887 continue;
2889 else
2890 return NULL;
2892 else if (is_gimple_call (gs))
2894 tree fdecl = get_fdecl_from_gimple_stmt (gs);
2896 /* The function decl could be null in some cases, e.g.
2897 a function pointer passed in as a parameter. */
2898 if (fdecl
2899 && (lookup_attribute ("exclusive_trylock",
2900 DECL_ATTRIBUTES (fdecl))
2901 || lookup_attribute ("shared_trylock",
2902 DECL_ATTRIBUTES (fdecl))))
2904 void **entry = pointer_map_contains (trylock_info_map, gs);
2905 gcc_assert (entry);
2906 return (struct trylock_info *)*entry;
2908 else
2909 return NULL;
2911 else
2912 return NULL;
2915 gcc_unreachable ();
2917 return NULL;
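/* For instance, with a trylock primitive annotated as sketched below (the
   "true" attribute argument is the value returned on successful
   acquisition; all names are illustrative), the trylock_info recorded for
   the call carries succ_retval == true, which handle_cond_gs below uses to
   decide on which branch the lock is live:

     class __attribute__ ((lockable)) Mutex
     {
      public:
       bool TryLock () __attribute__ ((exclusive_trylock (true)));
       void Unlock () __attribute__ ((unlock));
     };

     void f (Mutex *mu)
     {
       if (mu->TryLock ())   // the lock is live only on the true edge
         {
           mu->Unlock ();
         }
     }
*/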
2920 /* This routine determines whether the given condition statement (COND_STMT) is
2921 fed by a trylock call through expressions involving only "not", "equal",
2922 and "not-equal" operations. Here are several examples where the condition
2923 statements are fed by trylock calls:
2925 (1) if (mu.Trylock()) {
2929 (2) bool a = mu.Trylock();
2930 bool b = !a;
2931 if (b) {
2935 (3) int a = pthread_mutex_trylock(mu);
2936 bool b = (a == 1);
2937 if (!b) {
2941 (4) int a = pthread_mutex_trylock(mu);
2942 bool b = (a != 0);
2943 bool c = b;
2944 if (c == true) {
2948 If COND_STMT is determined to be fed by a trylock call, this routine
2949 populates the data structure pointed to by CURRENT_BB_INFO, and
2950 sets *LOCK_ON_TRUE_PATH to indicate whether the true (control flow) path
2951 should be taken when the lock is successfully acquired. */
2953 static void
2954 handle_cond_gs (gimple cond_stmt, struct bb_threadsafe_info *current_bb_info)
2956 gimple gs = NULL;
2957 bool lock_on_true_path = true;
2958 bool is_cond_stmt = true;
2959 edge true_edge, false_edge;
2960 basic_block bb = gimple_bb (cond_stmt);
2961 tree op0 = gimple_cond_lhs (cond_stmt);
2962 tree op1 = gimple_cond_rhs (cond_stmt);
2963 enum tree_code subcode = gimple_cond_code (cond_stmt);
2965 /* We only handle condition expressions with "equal" or "not-equal"
2966 operations. */
2967 if (subcode != EQ_EXPR && subcode != NE_EXPR)
2968 return;
2970 /* In the new gimple tuple IR, a single operand if-condition such as
2972 if (a) {
2975 would be represented as
2977 GIMPLE_COND <NE_EXPR, a, 0, TRUE_LABEL, FALSE_LABEL>
2979 Here we detect this case and grab the SSA definition of a. */
2981 if (TREE_CODE (op0) == SSA_NAME
2982 && subcode == NE_EXPR
2983 && TREE_CODE (op1) == INTEGER_CST
2984 && TREE_INT_CST_LOW (op1) == 0)
2986 gs = SSA_NAME_DEF_STMT (op0);
2987 is_cond_stmt = false;
2990 /* Iteratively back-tracing the SSA definitions to determine if the
2991 condition expression is fed by a trylock call. If so, record the
2992 edge that indicates successful lock acquisition. */
2993 while (1)
2995 if (is_cond_stmt || is_gimple_assign (gs))
2997 if (!is_cond_stmt)
2999 gcc_assert (gs);
3000 subcode = gimple_assign_rhs_code (gs);
3003 if (subcode == SSA_NAME)
3005 gcc_assert (!is_cond_stmt);
3006 gs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (gs));
3007 continue;
3009 else if (subcode == TRUTH_NOT_EXPR)
3011 gcc_assert (!is_cond_stmt);
3012 lock_on_true_path = !lock_on_true_path;
3013 gs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (gs));
3014 continue;
3016 else if (subcode == EQ_EXPR || subcode == NE_EXPR)
3018 struct trylock_info *tryinfo;
3019 int const_op;
3020 if (!is_cond_stmt)
3022 op0 = gimple_assign_rhs1 (gs);
3023 op1 = gimple_assign_rhs2 (gs);
3025 if (TREE_CODE (op0) == INTEGER_CST
3026 && TREE_CODE (op1) == SSA_NAME)
3028 const_op = TREE_INT_CST_LOW (op0);
3029 tryinfo = get_trylock_info (SSA_NAME_DEF_STMT (op1),
3030 &lock_on_true_path);
3032 else if (TREE_CODE (op1) == INTEGER_CST
3033 && TREE_CODE (op0) == SSA_NAME)
3035 const_op = TREE_INT_CST_LOW (op1);
3036 tryinfo = get_trylock_info (SSA_NAME_DEF_STMT (op0),
3037 &lock_on_true_path);
3039 else
3040 return;
3042 if (tryinfo)
3044 struct pointer_set_t *edge_locks;
3045 /* Depending on the operation (eq or neq) and whether the
3046 succ_retval of the trylock is the same as the constant
3047 integer operand, we might need to toggle the value
3048 pointed to by lock_on_true_path. For example, if the
3049 succ_retval of TryLock() is 0 and the cond expression is
3050 (mu.TryLock() != 0), we need to negate the
3051 lock_on_true_path value. */
3052 if ((tryinfo->succ_retval == const_op
3053 && subcode == NE_EXPR)
3054 || (tryinfo->succ_retval != const_op
3055 && subcode == EQ_EXPR))
3056 lock_on_true_path = !lock_on_true_path;
3058 edge_locks = pointer_set_copy (tryinfo->locks);
3059 if (tryinfo->is_exclusive)
3060 current_bb_info->edge_exclusive_locks = edge_locks;
3061 else
3062 current_bb_info->edge_shared_locks = edge_locks;
3063 break;
3065 else
3066 return;
3068 else
3069 return;
3071 else if (is_gimple_call (gs))
3073 struct trylock_info *tryinfo;
3074 tryinfo = get_trylock_info (gs, &lock_on_true_path);
3075 if (tryinfo)
3077 struct pointer_set_t *edge_locks;
3078 /* If the succ_retval of the trylock is 0 (or boolean
3079 "false"), we need to negate the value pointed to by
3080 lock_on_true_path. */
3081 if (tryinfo->succ_retval == 0)
3082 lock_on_true_path = !lock_on_true_path;
3083 edge_locks = pointer_set_copy (tryinfo->locks);
3084 if (tryinfo->is_exclusive)
3085 current_bb_info->edge_exclusive_locks = edge_locks;
3086 else
3087 current_bb_info->edge_shared_locks = edge_locks;
3088 break;
3090 else
3091 return;
3093 else
3094 return;
3097 gcc_assert (current_bb_info->edge_exclusive_locks
3098 || current_bb_info->edge_shared_locks);
3099 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3100 if (lock_on_true_path)
3101 current_bb_info->trylock_live_edge = true_edge;
3102 else
3103 current_bb_info->trylock_live_edge = false_edge;
3106 /* This is a helper function to populate the LOCK_SET with the locks
3107 specified in ATTR's arguments. */
3109 static void
3110 populate_lock_set_with_attr (struct pointer_set_t *lock_set, tree attr)
3112 tree arg;
3114 for (arg = TREE_VALUE (attr); arg; arg = TREE_CHAIN (arg))
3116 tree lock = TREE_VALUE (arg);
3117 gcc_assert (lock);
3118 /* If the lock is an integer specifying the argument position, grab
3119 the corresponding formal parameter. */
3120 if (TREE_CODE (lock) == INTEGER_CST)
3122 int lock_pos = TREE_INT_CST_LOW (lock);
3123 int i;
3124 tree parm;
3125 for (parm = DECL_ARGUMENTS (current_function_decl), i = 1;
3126 parm;
3127 parm = TREE_CHAIN (parm), ++i)
3128 if (i == lock_pos)
3129 break;
3130 gcc_assert (parm);
3131 lock = parm;
3134 /* Canonicalize the lock before we add it to the lock set. */
3135 if (!DECL_P (lock))
3136 lock = get_canonical_lock_expr (lock, NULL_TREE,
3137 false /* is_temp_expr */, NULL_TREE);
3139 /* Add the lock to the lock set. */
3140 pointer_set_insert (lock_set, lock);
3142 /* If there were unbound locks when the thread safety attributes were
3143 parsed, we should try to bind them now if we see any lock declaration
3144 that matches the name of the unbound lock. */
3145 if (unbound_lock_map
3146 && (TREE_CODE (lock) == VAR_DECL
3147 || TREE_CODE (lock) == PARM_DECL
3148 || TREE_CODE (lock) == FIELD_DECL))
3150 tree lock_id = DECL_NAME (lock);
3151 void **entry = pointer_map_contains (unbound_lock_map, lock_id);
3152 if (entry)
3153 *entry = lock;
3158 /* This is a helper function passed in (as a parameter) to pointer_set_traverse
3159 when we traverse the set containing locks that are not properly released
3160 and emit a warning message for each of them. By improper release, we mean
3161 that the places where these locks are released are not control-equivalent to
3162 where they are acquired. The improperly-released lock set is calculated when
3163 we reach a join point during the data flow analysis. Any lock that is not
3164 in all of the preceding basic blocks' live-out sets is considered not
3165 released locally. REPORTED set contains the locks for which we have
3166 already printed out a warning message. We use this set to avoid emitting
3167 duplicate warnings for a lock. Here is an example of why duplicate warnings
3168 could be emitted if we don't keep a REPORTED set.
3171               B1: mu.Lock()
3173                  /    |    \
3174                 /     |     \
3175               B2:    B3:    B4:
3176          mu.Unlock()
3177                  \    |    /
3178                   \   |   /
                        B5:
3182 When we reach B5, "mu" would be in the live out sets of B3 and B4, but
3183 not that of B2. If we do a live out set intersection between B2 and B3
3184 first, and then intersect the resulting set with B4's live out set, we
3185 could've emitted the warning message for "mu" twice if we had not kept
3186 a reported set. */
3188 static bool
3189 warn_locally_unreleased_locks (const void *lock, void *reported)
3191 char lname[LOCK_NAME_LEN];
3192 void **entry;
3193 tree lock_tree = CONST_CAST_TREE ((const_tree) lock);
3194 location_t *loc;
3195 struct pointer_set_t *reported_unreleased_locks;
3197 reported_unreleased_locks = (struct pointer_set_t *) reported;
3199 /* If this unreleased lock has been reported or is a universal lock (i.e.
3200 error_mark_node), don't emit a warning message for it again. */
3201 if (lock != error_mark_node
3202 && !pointer_set_contains (reported_unreleased_locks, lock))
3204 entry = pointer_map_contains (lock_locus_map, lock);
3205 if (entry)
3207 loc = (location_t *) *entry;
3208 warning_at (*loc, OPT_Wthread_safety,
3209 G_("Lock %s (acquired at line %d) is not released at"
3210 " the end of its scope in function %qE"),
3211 dump_expr_tree (lock_tree, lname),
3212 LOCATION_LINE (*loc),
3213 DECL_NAME (current_function_decl));
3215 else
3216 warning_at (DECL_SOURCE_LOCATION (current_function_decl),
3217 OPT_Wthread_safety,
3218 G_("Lock %s (held at entry) is released on some but not all"
3219 " control flow paths in function %qE"),
3220 dump_expr_tree (lock_tree, lname),
3221 DECL_NAME (current_function_decl));
3223 pointer_set_insert (reported_unreleased_locks, lock);
3226 return true;
3229 /* This is a helper function passed in (as a parameter) to pointer_set_traverse
3230 when we iterate through the set of locks that are not released at the end
3231 of a function. A warning message is emitted for each of them unless the lock
3232 was already held at function entry (i.e. acquired before calling
3233 the current function). */
3235 static bool
3236 warn_unreleased_locks (const void *lock, void *locks_at_entry)
3238 /* If the unreleased lock was actually acquired before calling the current
3239 function, we don't emit a warning for it as the lock is not expected to
3240 be released in the current function anyway. Also if the lock is a
3241 universal lock (i.e. error_mark_node), don't emit a warning either. */
3242 if (lock != error_mark_node
3243 && !pointer_set_contains ((struct pointer_set_t *) locks_at_entry, lock))
3245 char lname[LOCK_NAME_LEN];
3246 void **entry = pointer_map_contains (lock_locus_map, lock);
3247 tree lock_tree = CONST_CAST_TREE ((const_tree) lock);
3248 location_t *loc;
3249 gcc_assert (entry);
3250 loc = (location_t *) *entry;
3251 warning_at (*loc, OPT_Wthread_safety,
3252 G_("Lock %s (acquired at line %d) is not released at the end"
3253 " of function %qE"),
3254 dump_expr_tree (lock_tree, lname),
3255 LOCATION_LINE (*loc),
3256 DECL_NAME (current_function_decl));
3259 return true;
3262 /* This is a helper function passed in (as a parameter) to
3263 pointer_map_traverse when we delete lock_locus_map. */
3265 static bool
3266 delete_lock_locations (const void * ARG_UNUSED (lock),
3267 void **entry, void * ARG_UNUSED (data))
3269 XDELETE (*entry);
3270 return true;
3273 /* This is a helper function passed in (as a parameter) to
3274 pointer_map_traverse when we delete trylock_info_map. */
3276 static bool
3277 delete_trylock_info (const void * ARG_UNUSED (call),
3278 void **entry, void * ARG_UNUSED (data))
3280 struct trylock_info *tryinfo = (struct trylock_info *)*entry;
3281 gcc_assert (tryinfo);
3282 pointer_set_destroy (tryinfo->locks);
3283 XDELETE (tryinfo);
3284 return true;
3287 /* Helper function for walk_gimple_stmt() that is called on each gimple
3288 statement. Except for call statements and SSA definitions of nameless
3289 temp variables, the operands of the statements will be analyzed by
3290 analyze_op_r(). */
3292 static tree
3293 analyze_stmt_r (gimple_stmt_iterator *gsi, bool *handled_ops,
3294 struct walk_stmt_info *wi)
3296 gimple stmt = gsi_stmt (*gsi);
3297 struct bb_threadsafe_info *current_bb_info =
3298 (struct bb_threadsafe_info *) wi->info;
3300 if (is_gimple_call (stmt))
3302 handle_call_gs (stmt, current_bb_info);
3303 /* The arguments of the call are already analyzed in handle_call_gs.
3304 Set *handled_ops to true to skip calling analyze_op_r later. */
3305 *handled_ops = true;
3307 else if (gimple_code (stmt) == GIMPLE_COND)
3308 handle_cond_gs (stmt, current_bb_info);
3310 return NULL_TREE;
3313 /* Helper function for walk_gimple_stmt() that is called on each operand of
3314 a visited gimple statement. */
3316 static tree
3317 analyze_op_r (tree *tp, int *walk_subtrees, void *data)
3319 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3320 struct bb_threadsafe_info *current_bb_info =
3321 (struct bb_threadsafe_info *) wi->info;
3322 gimple stmt = gsi_stmt (wi->gsi);
3323 enum access_mode mode = wi->is_lhs ? TSA_WRITE : TSA_READ;
3324 location_t locus;
3326 /* Analyze the statement operand if we are not instructed to ignore the
3327 reads or writes and if it is not a constant. */
3328 if (!(current_bb_info->reads_ignored && mode == TSA_READ)
3329 && !(current_bb_info->writes_ignored && mode == TSA_WRITE)
3330 && !CONSTANT_CLASS_P (*tp))
3332 if (!gimple_has_location (stmt))
3333 locus = input_location;
3334 else
3335 locus = gimple_location (stmt);
3337 analyze_expr(*tp, NULL_TREE, false /* is_indirect_ref */,
3338 current_bb_info->live_excl_locks,
3339 current_bb_info->live_shared_locks, &locus, mode);
3342 *walk_subtrees = 0;
3344 return NULL_TREE;
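/* Illustrative sketch, not part of the pass (compiled out): how the two
   walker callbacks above are meant to be driven.  This mirrors the
   per-basic-block statement loop in execute_threadsafe_analyze below; the
   wrapper name and its bb_info parameter are made up.  wi.info is the
   channel through which the current block's live lock sets reach
   analyze_op_r.  */
#if 0
static void
analyze_bb_statements (basic_block bb, struct bb_threadsafe_info *bb_info)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct walk_stmt_info wi;
      memset (&wi, 0, sizeof (wi));
      wi.info = (void *) bb_info;
      /* analyze_stmt_r runs once per statement; analyze_op_r runs on each
         operand unless analyze_stmt_r sets *handled_ops.  */
      walk_gimple_stmt (&gsi, analyze_stmt_r, analyze_op_r, &wi);
    }
}
#endif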
3347 /* Perform thread safety analysis using the attributes mentioned above
3348 (see comments at the beginning of the file). The analysis pass uses
3349 a single-pass (or single iteration) data-flow analysis to calculate
3350 live lock sets at each program point, using the attributes to decide
3351 when to add locks to the live sets and when to remove them from the
3352 sets. With the live lock sets and the attributes attached to shared
3353 variables and functions, we are able to check whether the variables
3354 and functions are well protected. Note that the reason why we don't
3355 need iterative data flow analysis is because critical sections across
3356 back edges are considered a bad practice.
3358 The single-iteration data flow analysis is performed by visiting
3359 each basic block only once in a topological order. The topological
3360 traversal is achieved by maintaining a work list (or ready list) which
3361 is seeded with the successors of the function's entry block. A basic
3362 block is added to the work list when all of its predecessors have been
3363 visited. During the traversal, back edges are ignored. */
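/* Illustrative sketch, not part of the pass (compiled out): a minimal,
   standalone C model of the single-pass work-list analysis described above.
   Lock sets are modeled as bit masks and the CFG as fixed arrays; every
   name here is made up and none of it is the pass's real representation,
   which uses pointer sets and GCC's CFG.  A block enters the work list once
   all of its predecessors have been visited (the example CFG is acyclic, so
   there are no back edges to skip), and its live-in set is the intersection
   of its predecessors' live-out sets.  */
#if 0
#include <stdio.h>

#define N_BLOCKS 4
#define MAX_PREDS 4

struct model_bb
{
  int preds[MAX_PREDS];          /* Predecessor block indices.  */
  int n_preds;                   /* Number of predecessors.  */
  unsigned acquired, released;   /* Locks acquired/released in the block.  */
};

int
main (void)
{
  /* Diamond CFG: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3.  Lock bit 0x1 is acquired
     in block 0 but released only in block 1, so the intersection at the
     join block 3 drops it; the real pass reports this as a mismatched
     acquire/release.  */
  struct model_bb bb[N_BLOCKS] = {
    { { 0 }, 0, 0x1, 0x0 },
    { { 0 }, 1, 0x0, 0x1 },
    { { 0 }, 1, 0x0, 0x0 },
    { { 1, 2 }, 2, 0x0, 0x0 },
  };
  unsigned live_out[N_BLOCKS] = { 0 };
  int n_visited[N_BLOCKS] = { 0 };
  int worklist[N_BLOCKS];
  int append_ptr = 0, visit_ptr = 0;
  int i, p, s;

  worklist[append_ptr++] = 0;      /* Seed with the entry block.  */

  while (visit_ptr != append_ptr)
    {
      unsigned live_in = ~0u;

      i = worklist[visit_ptr++];

      /* Meet: intersect the live-out sets of all predecessors.  */
      for (p = 0; p < bb[i].n_preds; ++p)
        live_in &= live_out[bb[i].preds[p]];
      if (bb[i].n_preds == 0)
        live_in = 0;

      /* Transfer: add locks acquired here, remove locks released here.  */
      live_out[i] = (live_in | bb[i].acquired) & ~bb[i].released;
      printf ("bb %d: live-out locks = 0x%x\n", i, live_out[i]);

      /* Append a successor once all of its predecessors are visited.  */
      for (s = 0; s < N_BLOCKS; ++s)
        for (p = 0; p < bb[s].n_preds; ++p)
          if (bb[s].preds[p] == i && ++n_visited[s] == bb[s].n_preds)
            worklist[append_ptr++] = s;
    }
  return 0;
}
#endif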
3365 static unsigned int
3366 execute_threadsafe_analyze (void)
3368 size_t append_ptr = 0, visit_ptr = 0;
3369 basic_block bb;
3370 edge e;
3371 edge_iterator ei;
3372 tree fdecl_attrs;
3373 struct bb_threadsafe_info *threadsafe_info;
3374 struct pointer_set_t *live_excl_locks_at_entry;
3375 struct pointer_set_t *live_shared_locks_at_entry;
3376 tree attr;
3377 basic_block *worklist;
3378 int i;
3379 int old_mismatched_lock_acq_rel = warn_thread_mismatched_lock_acq_rel;
3381 /* Skip the compiler-generated functions. */
3382 if (DECL_ARTIFICIAL (current_function_decl))
3383 return 0;
3385 /* Constructors and destructors should only be invoked by a single
3386 thread and therefore are ignored here. */
3387 if (lang_hooks.decl_is_constructor (current_function_decl)
3388 || lang_hooks.decl_is_destructor (current_function_decl))
3389 return 0;
3391 /* If the current function is annotated as a locking or unlocking primitive,
3392 or if it is marked to be skipped (with no_thread_safety_analysis
3393 attribute), ignore it. */
3394 fdecl_attrs = DECL_ATTRIBUTES (current_function_decl);
3395 if (lookup_attribute("exclusive_lock", fdecl_attrs)
3396 || lookup_attribute("shared_lock", fdecl_attrs)
3397 || lookup_attribute("exclusive_trylock", fdecl_attrs)
3398 || lookup_attribute("shared_trylock", fdecl_attrs)
3399 || lookup_attribute("unlock", fdecl_attrs)
3400 || lookup_attribute("no_thread_safety_analysis", fdecl_attrs))
3401 return 0;
3403 /* If this is the first function of the current compilation unit, we need
3404 to build the transitive acquired_after sets for the locks. */
3405 if (lock_acquired_after_map && !transitive_acq_after_sets_built)
3407 build_transitive_acquired_after_sets();
3408 transitive_acq_after_sets_built = true;
3411 /* Mark the back edges in the cfg so that we can skip them later
3412 in our (single-iteration) data-flow analysis. */
3413 mark_dfs_back_edges ();
3415 /* Allocate lock-related global maps. */
3416 scopedlock_to_lock_map = pointer_map_create ();
3417 lock_locus_map = pointer_map_create ();
3418 trylock_info_map = pointer_map_create ();
3419 gimple_call_tab = htab_create (10, call_gs_hash, call_gs_eq, NULL);
3421 /* Initialize the pretty printer buffer for emitting warnings. */
3422 pp_construct (&pp_buf, /* prefix */ NULL, /* line-width */ 0);
3424 /* Allocate the threadsafe info array.
3425 Use XCNEWVEC to clear out the info. */
3426 threadsafe_info = XCNEWVEC(struct bb_threadsafe_info, last_basic_block);
3428 /* Since the back/complex edges are not traversed in the analysis,
3429 mark them as visited. */
3430 FOR_EACH_BB (bb)
3432 FOR_EACH_EDGE (e, ei, bb->preds)
3434 if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
3435 ++threadsafe_info[bb->index].n_preds_visited;
3439 /* Populate ENTRY_BLOCK's live out sets with "exclusive_locks_required"
3440 and "shared_locks_required" attributes. */
3441 live_excl_locks_at_entry = pointer_set_create();
3442 live_shared_locks_at_entry = pointer_set_create();
3444 attr = lookup_attribute("exclusive_locks_required",
3445 DECL_ATTRIBUTES (current_function_decl));
3446 if (attr)
3447 populate_lock_set_with_attr(live_excl_locks_at_entry, attr);
3449 attr = lookup_attribute("shared_locks_required",
3450 DECL_ATTRIBUTES (current_function_decl));
3451 if (attr)
3452 populate_lock_set_with_attr(live_shared_locks_at_entry, attr);
3454 threadsafe_info[ENTRY_BLOCK_PTR->index].liveout_exclusive_locks =
3455 live_excl_locks_at_entry;
3456 threadsafe_info[ENTRY_BLOCK_PTR->index].liveout_shared_locks =
3457 live_shared_locks_at_entry;
3459 threadsafe_info[ENTRY_BLOCK_PTR->index].weak_released_locks =
3460 pointer_set_create ();
3462 /* Allocate the worklist of BBs for topological traversal, which is
3463 basically an array of pointers to basic blocks. */
3464 worklist = XNEWVEC (basic_block, n_basic_blocks);
3466 /* Seed the worklist by adding the successors of the entry block
3467 to the worklist. */
3468 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3470 worklist[append_ptr++] = e->dest;
3473 /* The basic blocks in the current function are traversed in a topological
3474 order. Both "visit_ptr" and "append_ptr" are indices into the worklist
3475 array and initialized to zero. "append_ptr" is incremented whenever a BB
3476 is added to the work list, while "visit_ptr" is incremented when we
3477 visit a BB. When "visit_ptr" catches up with "append_ptr", the traversal
3478 is done. */
3479 while (visit_ptr != append_ptr)
3481 struct pointer_set_t *reported_unreleased_locks = pointer_set_create();
3482 struct bb_threadsafe_info *current_bb_info;
3483 gimple_stmt_iterator gsi;
3485 bb = worklist[visit_ptr++];
3486 current_bb_info = &threadsafe_info[bb->index];
3487 current_bb_info->visited = true;
3489 /* First create the live-in lock sets for bb by intersecting all of its
3490 predecessors' live-out sets */
3491 FOR_EACH_EDGE (e, ei, bb->preds)
3493 basic_block pred_bb = e->src;
3494 struct pointer_set_t *unreleased_locks;
3495 struct pointer_set_t *pred_liveout_excl_locks;
3496 struct pointer_set_t *pred_liveout_shared_locks;
3498 /* Skip the back/complex edge. */
3499 if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
3500 continue;
3502 /* If this is the first of bb's predecessors to be processed, simply
3503 copy the predecessor's live-out sets and reads/writes_ignored flags
3504 to bb's live (working) sets and corresponding flags. */
3505 if (current_bb_info->live_excl_locks == NULL)
3507 current_bb_info->reads_ignored =
3508 threadsafe_info[pred_bb->index].reads_ignored;
3509 current_bb_info->writes_ignored =
3510 threadsafe_info[pred_bb->index].writes_ignored;
3511 current_bb_info->live_excl_locks = pointer_set_copy (
3512 threadsafe_info[pred_bb->index].liveout_exclusive_locks);
3513 current_bb_info->live_shared_locks = pointer_set_copy (
3514 threadsafe_info[pred_bb->index].liveout_shared_locks);
3515 current_bb_info->weak_released_locks = pointer_set_copy (
3516 threadsafe_info[pred_bb->index].weak_released_locks);
3517 /* If the pred bb has a trylock call and its edge to the current
3518 bb is the one for successful lock acquisition, add the
3519 trylock live sets to the bb's live working sets. */
3520 if (threadsafe_info[pred_bb->index].trylock_live_edge == e)
3522 gcc_assert (
3523 threadsafe_info[pred_bb->index].edge_exclusive_locks
3524 || threadsafe_info[pred_bb->index].edge_shared_locks);
3525 if (threadsafe_info[pred_bb->index].edge_exclusive_locks)
3526 pointer_set_union_inplace (
3527 current_bb_info->live_excl_locks,
3528 threadsafe_info[pred_bb->index].edge_exclusive_locks);
3529 if (threadsafe_info[pred_bb->index].edge_shared_locks)
3530 pointer_set_union_inplace (
3531 current_bb_info->live_shared_locks,
3532 threadsafe_info[pred_bb->index].edge_shared_locks);
3534 continue;
3537 unreleased_locks = pointer_set_create();
3538 pred_liveout_excl_locks =
3539 threadsafe_info[pred_bb->index].liveout_exclusive_locks;
3540 pred_liveout_shared_locks =
3541 threadsafe_info[pred_bb->index].liveout_shared_locks;
3543 /* If the pred bb has a trylock call and its edge to the current
3544 bb is the one for successful lock acquisition, add the
3545 trylock live sets to the pred bb's live-out sets. */
3546 if (threadsafe_info[pred_bb->index].trylock_live_edge == e)
3548 gcc_assert(threadsafe_info[pred_bb->index].edge_exclusive_locks
3549 || threadsafe_info[pred_bb->index].edge_shared_locks);
3550 /* The following code will clobber the original contents of
3551 edge_exclusive_locks set and/or edge_shared_locks set of
3552 the pred bb, but that is fine because they will not be
3553 used in the future (as this edge is visited only once in
3554 our single-iteration data-flow analysis). */
3555 if (threadsafe_info[pred_bb->index].edge_exclusive_locks)
3557 pred_liveout_excl_locks =
3558 threadsafe_info[pred_bb->index].edge_exclusive_locks;
3559 pointer_set_union_inplace (pred_liveout_excl_locks,
3560 threadsafe_info[pred_bb->index].liveout_exclusive_locks);
3563 if (threadsafe_info[pred_bb->index].edge_shared_locks)
3565 pred_liveout_shared_locks =
3566 threadsafe_info[pred_bb->index].edge_shared_locks;
3567 pointer_set_union_inplace (pred_liveout_shared_locks,
3568 threadsafe_info[pred_bb->index].liveout_shared_locks);
3572 /* Logical-and the current BB's reads/writes_ignored flags with the
3573 predecessor's flags. These flags will be true at the beginning
3574 of a BB only when they are true at the end of all the
3575 predecessors. */
3576 current_bb_info->reads_ignored &=
3577 threadsafe_info[pred_bb->index].reads_ignored;
3578 current_bb_info->writes_ignored &=
3579 threadsafe_info[pred_bb->index].writes_ignored;
3581 /* Intersect the current (working) live set with the predecessor's
3582 live-out set. Locks that are not in the intersection (i.e.
3583 complement set) should be reported as improperly released. */
3584 pointer_set_intersection_complement (
3585 current_bb_info->live_excl_locks,
3586 pred_liveout_excl_locks,
3587 unreleased_locks);
3588 pointer_set_intersection_complement (
3589 current_bb_info->live_shared_locks,
3590 pred_liveout_shared_locks,
3591 unreleased_locks);
3593 /* Take the union of the weak released lock sets of the
3594 predecessors. */
3595 pointer_set_union_inplace (
3596 current_bb_info->weak_released_locks,
3597 threadsafe_info[pred_bb->index].weak_released_locks);
3599 /* If a lock is released by a Release function of a scoped lock on
3600 some control-flow paths (but not all), the lock would still be
3601 live on other paths, which is OK as the destructor of the scoped
3602 lock will eventually release the lock. We don't want to emit
3603 bogus warnings about the release inconsistency at the
3604 control-flow join point. To avoid that, we simply add those
3605 weakly-released locks to the REPORTED_UNRELEASED_LOCKS set. */
3606 pointer_set_union_inplace (
3607 reported_unreleased_locks,
3608 current_bb_info->weak_released_locks);
3610 /* Emit warnings for the locks that are not properly released.
3611 That is, the places where they are released are not
3612 control-equivalent to where they are acquired. */
3613 if (warn_thread_mismatched_lock_acq_rel)
3614 pointer_set_traverse (unreleased_locks,
3615 warn_locally_unreleased_locks,
3616 reported_unreleased_locks);
3618 pointer_set_destroy (unreleased_locks);
3621 pointer_set_destroy (reported_unreleased_locks);
3622 gcc_assert (current_bb_info->live_excl_locks != NULL);
3624 /* Now iterate through each statement of the bb and analyze its
3625 operands. */
3626 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3628 struct walk_stmt_info wi;
3629 memset (&wi, 0, sizeof (wi));
3630 wi.info = (void *) current_bb_info;
3631 walk_gimple_stmt (&gsi, analyze_stmt_r, analyze_op_r, &wi);
3634 /* Now that we have visited the current bb, check if any of its
3635 successors can be added to the work list. */
3636 FOR_EACH_EDGE (e, ei, bb->succs)
3638 basic_block succ_bb;
3639 if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
3640 continue;
3641 succ_bb = e->dest;
3642 /* Since we skip the back edges, we shouldn't see a visited basic
3643 block again here. */
3644 gcc_assert (!threadsafe_info[succ_bb->index].visited);
3645 if ((++threadsafe_info[succ_bb->index].n_preds_visited) ==
3646 EDGE_COUNT(succ_bb->preds))
3647 worklist[append_ptr++] = succ_bb;
3650 current_bb_info->liveout_exclusive_locks =
3651 current_bb_info->live_excl_locks;
3652 current_bb_info->liveout_shared_locks =
3653 current_bb_info->live_shared_locks;
3656 /* If there are still live locks at the end of the function that were not
3657 already held at the entry of the function (i.e. not in the function's
3658 locks_required sets), emit warning messages for them.
3659 Note that the exit block may not be reachable from the entry (e.g. when
3660 there are abort() or exit() calls that collectively dominate the exit
3661 block). We need to check whether its liveout_exclusive_locks and
3662 liveout_shared_locks are empty before trying to traverse them.
3663 TODO: Besides the exit block, we also need to check the basic blocks
3664 that don't have any successors as they are practically "exit" blocks
3665 as well. */
3666 if (warn_thread_mismatched_lock_acq_rel)
3668 if (threadsafe_info[EXIT_BLOCK_PTR->index].liveout_exclusive_locks)
3669 pointer_set_traverse(
3670 threadsafe_info[EXIT_BLOCK_PTR->index].liveout_exclusive_locks,
3671 warn_unreleased_locks, live_excl_locks_at_entry);
3672 if (threadsafe_info[EXIT_BLOCK_PTR->index].liveout_shared_locks)
3673 pointer_set_traverse(
3674 threadsafe_info[EXIT_BLOCK_PTR->index].liveout_shared_locks,
3675 warn_unreleased_locks, live_shared_locks_at_entry);
3678 /* Free the allocated data structures. */
3679 for (i = 0; i < last_basic_block; ++i)
3681 if (threadsafe_info[i].liveout_exclusive_locks != NULL)
3683 pointer_set_destroy(threadsafe_info[i].liveout_exclusive_locks);
3684 pointer_set_destroy(threadsafe_info[i].liveout_shared_locks);
3686 if (threadsafe_info[i].weak_released_locks != NULL)
3687 pointer_set_destroy (threadsafe_info[i].weak_released_locks);
3688 if (threadsafe_info[i].edge_exclusive_locks != NULL)
3689 pointer_set_destroy (threadsafe_info[i].edge_exclusive_locks);
3690 if (threadsafe_info[i].edge_shared_locks != NULL)
3691 pointer_set_destroy (threadsafe_info[i].edge_shared_locks);
3694 XDELETEVEC (threadsafe_info);
3695 XDELETEVEC (worklist);
3697 pp_clear_output_area (&pp_buf);
3698 pointer_map_destroy (scopedlock_to_lock_map);
3699 pointer_map_traverse (lock_locus_map, delete_lock_locations, NULL);
3700 pointer_map_destroy (lock_locus_map);
3701 pointer_map_traverse (trylock_info_map, delete_trylock_info, NULL);
3702 pointer_map_destroy (trylock_info_map);
3703 htab_delete (gimple_call_tab);
3704 gimple_call_tab = NULL;
3706 /* The flag that controls the warning of mismatched lock acquire/release
3707 could be turned off when we see an unlock primitive with an unsupported
3708 lock name/expression (see process_function_attrs). We need to restore
3709 the original value of the flag after we finish analyzing the current
3710 function. */
3711 if (old_mismatched_lock_acq_rel != warn_thread_mismatched_lock_acq_rel)
3712 warn_thread_mismatched_lock_acq_rel = old_mismatched_lock_acq_rel;
3714 return 0;
3717 static bool
3718 gate_threadsafe_analyze (void)
3720 return warn_thread_safety != 0;
3723 struct gimple_opt_pass pass_threadsafe_analyze =
3726 GIMPLE_PASS,
3727 "threadsafe_analyze", /* name */
3728 gate_threadsafe_analyze, /* gate */
3729 execute_threadsafe_analyze, /* execute */
3730 NULL, /* sub */
3731 NULL, /* next */
3732 0, /* static_pass_number */
3733 TV_TREE_THREADSAFE, /* tv_id */
3734 PROP_cfg | PROP_ssa, /* properties_required */
3735 0, /* properties_provided */
3736 0, /* properties_destroyed */
3737 0, /* todo_flags_start */
3738 TODO_dump_func /* todo_flags_finish */
3742 #include "gt-tree-threadsafe-analyze.h"