// hphp/util/lock-free-ptr-wrapper.h
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#pragma once

#include <atomic>
#include <type_traits>

#include "hphp/util/assertions.h"
#include "hphp/util/low-ptr.h"
#include "hphp/util/smalllocks.h"
namespace HPHP {

//////////////////////////////////////////////////////////////////////

/*
 * Implements a wrapper around a pointer-like thing such that readers
 * can read the pointer without locking (and without writing memory at
 * all).
 *
 * A single writer will use a compare-and-swap followed by an exchange
 * to update the pointer; multiple concurrent writers will wait on a
 * futex.
 *
 * For this to work, T's behavior must be fully determined by its bit
 * pattern (ie it can't care about its address), so that after:
 *
 *   auto bits = *(uintptr_t*)&t;
 *   auto &t2 = *(T*)&bits;
 *
 * t and t2 can be used interchangeably.
 *
 * All pointer types, and CompactVector in particular, satisfy this.
 */
49 template<class T>
50 struct LockFreePtrWrapper {
51 static_assert((sizeof(T) == sizeof(uintptr_t)) ||
52 (sizeof(T) == sizeof(uint32_t)),
53 "LockFreePtrWrapper operates on pointers, or "
54 "classes that wrap pointers (with no other data)");
55 using raw_type =
56 typename std::conditional<sizeof(T) == 4, uint32_t, uintptr_t>::type;
58 LockFreePtrWrapper() : val{} { assertx(notLocked()); }
59 LockFreePtrWrapper(const T& v) : val{v} { assertx(notLocked()); }
60 ~LockFreePtrWrapper() {
61 assertx(notLocked());
62 val.~T();
65 LockFreePtrWrapper(const LockFreePtrWrapper<T>&) = delete;
66 LockFreePtrWrapper<T>& operator=(const LockFreePtrWrapper<T>&) = delete;
69 * We can't return a T* to callers because a valid T doesn't always exist in
70 * LockFreePtrWrapper. Holder is a convenience wrapper around a bitwise copy
71 * of T that avoids calling T's constructor or destructor, giving nearly the
72 * same effect as returning a T*.
74 struct Holder {
75 friend struct LockFreePtrWrapper;
76 auto get() { return getter(val, false); }
77 auto operator->() { return get(); }
78 Holder(const Holder& h) : bits{h.bits} { assertx(!(bits & ~kPtrMask)); }
79 Holder(raw_type bits) : bits{bits} { assertx(!(bits & ~kPtrMask)); }
80 Holder(T&& val) : val{std::move(val)} { assertx(!(bits & ~kPtrMask)); }
81 ~Holder() {}
82 private:
83 union {
84 raw_type bits;
85 T val;
87 template<typename U>
88 static const U* getter(const U& p, int f) { return &p; }
89 template<typename U>
90 static auto getter(const U& p, bool f) -> decltype(p.operator->(), p) {
91 return p;
95 // Get a bitwise copy of the current value
96 auto get() const {
97 return getImpl<T>();
100 auto operator->() const {
101 return get();
104 T copy() const {
105 return copyImpl<T>();
109 * Get an exclusive lock on the wrapped value. Other threads can
110 * still read its current value via get() or copy(). After calling
111 * this, you must unlock it either with update_and_unlock (if you
112 * want to change the value), or unlock (if you don't).
114 void lock_for_update();
117 * Like lock_for_update(), but returns false if it fails to acquire
118 * the lock rather than blocking. Returns true on success.
120 bool try_lock_for_update();
123 * Unlock it.
125 void unlock();
127 * Update the wrapped value, and return the old value. The old value
128 * will typically need to be destroyed via a treadmill-like
129 * mechanism, because other threads may have read the old value just
130 * prior to the update (and still be using it).
132 T update_and_unlock(T&& v);
134 protected:
135 // Constructor that accepts raw bits, for use in the unsafe version.
136 LockFreePtrWrapper(raw_type rawBits) : bits(rawBits) {
137 assertx(notLocked());
139 raw_type raw() const { return bits.load(std::memory_order_relaxed); }
140 const bool notLocked() {
141 return !(low_bits.load(std::memory_order_relaxed) & ~kPtrMask);
144 template<typename U>
145 typename std::enable_if<std::is_same<T,U>::value &&
146 std::is_pointer<U>::value,U>::type
147 getImpl() const {
148 return reinterpret_cast<T>(unlocked());
151 template<typename U>
152 typename std::enable_if<std::is_same<T,U>::value && is_lowptr_v<U>,
153 typename lowptr_traits<U>::pointer>::type
154 getImpl() const {
155 auto p = unlocked();
156 return reinterpret_cast<T*>(&p)->get();
159 template<typename U>
160 typename std::enable_if<std::is_same<T,U>::value &&
161 !std::is_pointer<U>::value &&
162 !is_lowptr_v<U>, Holder>::type
163 getImpl() const {
164 return Holder { unlocked() };
167 template<typename U>
168 typename std::enable_if<std::is_same<T,U>::value &&
169 (std::is_pointer<U>::value || is_lowptr_v<U>),
170 T>::type
171 copyImpl() const {
172 return get();
175 template<typename U>
176 typename std::enable_if<std::is_same<T,U>::value &&
177 !std::is_pointer<U>::value &&
178 !is_lowptr_v<U>, T>::type
179 copyImpl() const {
180 // We need to force a copy, rather than a move from get().val. If you
181 // change this, make sure you know what you're doing.
182 auto const& x = get();
183 return x.val;
186 raw_type unlock_helper(raw_type rep) {
187 auto const c = bits.exchange(rep, std::memory_order_release);
188 if (c & kLockWithWaitersBit) {
189 futex_wake(&low_bits, 1);
190 } else {
191 assertx(c & kLockNoWaitersBit);
193 return c & kPtrMask;
195 raw_type unlocked() const {
196 return bits.load(std::memory_order_acquire) & kPtrMask;
198 union {
199 std::atomic<raw_type> bits;
200 std::atomic<uint32_t> low_bits;
201 T val;
203 static_assert(
204 folly::kIsLittleEndian,
205 "The low bits of low_bits must correspond to the low bits of bits"
207 static constexpr raw_type kPtrMask = static_cast<raw_type>(-4);
208 static constexpr raw_type kLockNoWaitersBit = 1;
209 static constexpr raw_type kLockWithWaitersBit = 2;
213 * The unsafe-version of LockFreePtrWrapper, provides copy constructor and
214 * copy assignment operator, to be used when other mechanisms provide guarantee
215 * that no concurrent access happens during the unsafe copying.
217 * Warning: a thorough idea on when and in which threads all accesses happen is
218 * needed to safely use this.
220 template<class T>
221 struct UnsafeLockFreePtrWrapper : LockFreePtrWrapper<T> {
222 using LockFreePtrWrapper<T>::notLocked;
224 UnsafeLockFreePtrWrapper() : LockFreePtrWrapper<T>() {}
225 UnsafeLockFreePtrWrapper(const T& v) : LockFreePtrWrapper<T>(v) {}
227 UnsafeLockFreePtrWrapper(const UnsafeLockFreePtrWrapper& o)
228 : LockFreePtrWrapper<T>(o.raw()) {}
229 auto const& operator=(const UnsafeLockFreePtrWrapper& o) {
230 assertx(notLocked());
231 LockFreePtrWrapper<T>::bits.store(o.raw(), std::memory_order_relaxed);
232 assertx(notLocked());
233 return *this;
237 static_assert(!std::is_copy_constructible<LockFreePtrWrapper<int*>>::value, "");
238 static_assert(std::is_copy_constructible<UnsafeLockFreePtrWrapper<int*>>::
239 value, "");
//////////////////////////////////////////////////////////////////////
243 template<class T>
244 void LockFreePtrWrapper<T>::lock_for_update() {
245 auto lockBit = kLockNoWaitersBit;
246 while (true) {
247 auto c = raw() & kPtrMask;
248 // writing is expected to be unusual, so start by assuming the low
249 // two bits are clear, and attempt to set the appropriate low bit.
250 if (bits.compare_exchange_weak(c, c + lockBit, std::memory_order_relaxed)) {
251 return;
253 // We didn't get the lock, so someone else had it. c holds the
254 // value we found there.
255 if (c & kLockNoWaitersBit) {
256 auto const desired = (c & kPtrMask) + kLockWithWaitersBit;
257 if (!bits.compare_exchange_weak(c, desired, std::memory_order_relaxed)) {
258 // We failed, so someone else got in before us. start over.
259 continue;
261 // compare_exchange_weak only upates c when it fails, so set it
262 // to the value we actually wrote to memory.
263 c = desired;
265 assertx(!(c & kLockNoWaitersBit));
266 if (c & kLockWithWaitersBit) {
267 futex_wait(&low_bits, static_cast<uint32_t>(c));
268 // If we were waiting on the futex, others might have been
269 // waiting too (but we can't tell), so when we grab the lock, we
270 // have to record that fact.
271 lockBit = kLockWithWaitersBit;
276 template<typename T>
277 bool LockFreePtrWrapper<T>::try_lock_for_update() {
278 auto c = raw() & kPtrMask;
279 return bits.compare_exchange_weak(
281 c + kLockNoWaitersBit,
282 std::memory_order_relaxed
286 template<class T>
287 void LockFreePtrWrapper<T>::unlock() {
288 unlock_helper(raw() & kPtrMask);
291 template<class T>
292 T LockFreePtrWrapper<T>::update_and_unlock(T&& v) {
293 Holder h{std::move(v)};
295 h.bits = unlock_helper(h.bits);
296 return std::move(h.val);
//////////////////////////////////////////////////////////////////////

}