lockfree: use pointer compression on both x86_64 and on alpha
boost_lockfree.git: boost/lockfree/detail/cas.hpp
// Copyright (C) 2007, 2008, 2009 Tim Blechmann & Thomas Grill
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

// Disclaimer: Not a Boost library.

#ifndef BOOST_LOCKFREE_CAS_HPP_INCLUDED
#define BOOST_LOCKFREE_CAS_HPP_INCLUDED

#include <boost/lockfree/detail/prefix.hpp>
#include <boost/interprocess/detail/atomic.hpp>
#include <boost/detail/lightweight_mutex.hpp>
#include <boost/static_assert.hpp>

#include <boost/cstdint.hpp>

#include <boost/mpl/map.hpp>
#include <boost/mpl/at.hpp>
#include <boost/mpl/has_key.hpp> // for mpl::has_key, used by detail::atomic_cas below
#include <boost/mpl/if.hpp>
#include <boost/mpl/long.hpp>

#ifdef __SSE2__
#include "emmintrin.h"
#endif

namespace boost
{

namespace lockfree
{
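
/* Full memory barrier. The #if cascade below picks the strongest primitive
 * available for the target: an SSE2 mfence, the GCC/ICC __sync_synchronize()
 * builtin (gcc >= 4.1), MSVC's _ReadWriteBarrier(), Mac OS X's
 * OSMemoryBarrier(), a locked add on plain i386, or libatomic_ops'
 * AO_nop_full() as a last resort. */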
inline void memory_barrier(void)
{
#if defined(__SSE2__)
    _mm_mfence();
#elif defined(__GNUC__) && ( (__GNUC__ > 4) || ((__GNUC__ >= 4) && \
                (__GNUC_MINOR__ >= 1))) \
    || defined(__INTEL_COMPILER)
    __sync_synchronize();
#elif defined(_MSC_VER) && (_MSC_VER >= 1300)
    _ReadWriteBarrier();
#elif defined(__APPLE__)
    OSMemoryBarrier();
#elif defined(__GNUC__) && defined (__i386__)
    asm volatile("lock; addl $0,0(%%esp)":::"memory");
#elif defined(AO_HAVE_nop_full)
    AO_nop_full();
#else
#   warning "no memory barrier implemented for this platform"
#endif
}
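
/* Read (load) barrier: an SSE2 lfence where available, otherwise it falls
 * back to the full memory_barrier() above. */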
inline void read_memory_barrier(void)
{
#if defined(__SSE2__)
    _mm_lfence();
#else
    memory_barrier();
#endif
}
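
/* Fallback CAS, serialized by a static lightweight mutex. It is used for
 * types whose size has no native compare-and-swap on this platform; such
 * instantiations are not lock-free. */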
template <typename C>
struct atomic_cas_emulator
{
    static inline bool cas(volatile C * addr, C old, C nw)
    {
        static boost::detail::lightweight_mutex guard;
        boost::detail::lightweight_mutex::scoped_lock lock(guard);

        C * address = (C*) addr;

        if (*address == old)
        {
            *address = nw;
            return true;
        }
        else
            return false;
    }

    typedef C cas_type;

    static const bool is_lockfree = false; // mutex-based emulation is never lock-free
};


template <typename C>
inline bool atomic_cas_emulation(volatile C * addr, C old, C nw)
{
    return atomic_cas_emulator<C>::cas(addr, old, nw);
}

using boost::uint32_t;
using boost::uint64_t;
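
/* 32bit CAS: prefers the GCC/ICC __sync builtin, then the Mac OS X
 * OSAtomic call, otherwise the Boost.Interprocess implementation. Note that
 * interprocess' atomic_cas32 takes its arguments as (addr, new, old) and
 * returns the previous value. */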
struct atomic_cas32
{
    static inline bool cas(volatile uint32_t * addr,
                           uint32_t const & old,
                           uint32_t const & nw)
    {
#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) || defined(__INTEL_COMPILER)
        return __sync_bool_compare_and_swap(addr, old, nw);
#elif defined(__APPLE__)
        return OSAtomicCompareAndSwap32Barrier(old, nw, addr);
#else
        return boost::interprocess::detail::atomic_cas32(addr, nw, old) == old;
#endif
    }

    typedef uint32_t cas_type;

    static const bool is_lockfree = true;
};
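
/* 64bit CAS: the __sync builtin is used when the compiler advertises it or
 * when targeting x86_64/alpha with gcc >= 4.1; MSVC builds use
 * InterlockedCompareExchange64, Mac OS X uses OSAtomicCompareAndSwap64Barrier,
 * and everything else falls back to the blocking emulation. */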
struct atomic_cas64
{
    typedef uint64_t cas_type;

    static inline bool cas(volatile uint64_t * addr,
                           uint64_t const & old,
                           uint64_t const & nw)
    {
#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || \
    (defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 1) \
     || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 1)))) && (defined(__x86_64__) || defined(__alpha__) ) ) \
    || defined(__INTEL_COMPILER)
        return __sync_bool_compare_and_swap(addr, old, nw);
#elif defined(_M_IX86) || defined(_M_X64)
        return InterlockedCompareExchange64(reinterpret_cast<volatile LONGLONG*>(addr),
                                            static_cast<LONGLONG>(nw),
                                            static_cast<LONGLONG>(old)) == static_cast<LONGLONG>(old);
#elif defined(__APPLE__)
        return OSAtomicCompareAndSwap64Barrier(old, nw, addr);
#else
#define CAS_BLOCKING
        return atomic_cas_emulation((uint64_t *)addr, old, nw);
#endif
    }

#ifdef CAS_BLOCKING
#undef CAS_BLOCKING
    static const bool is_lockfree = false;
#else
    static const bool is_lockfree = true;
#endif
};
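
/* 128bit CAS: only lock-free when the compiler provides a 16-byte
 * __sync compare-and-swap (gcc's 128-bit integer via mode(TI)); otherwise
 * the value is modelled as a pair of 64bit words and handled by the
 * blocking emulation. */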
struct atomic_cas128
{
#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
    typedef int cas_type __attribute__ ((mode (TI)));
#else
    struct cas_type
    {
        bool operator==(cas_type const & rhs)
        {
            return (data[0] == rhs.data[0]) &&
                   (data[1] == rhs.data[1]);
        }

        uint64_t data[2];
    };
#endif

    static inline bool cas(volatile cas_type * addr, cas_type const & old, cas_type const & nw)
    {
#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
        return __sync_bool_compare_and_swap_16(addr, old, nw);
#else
#define CAS_BLOCKING
        return atomic_cas_emulation((cas_type*)addr, old, nw);
#endif
    }

#ifdef CAS_BLOCKING
#undef CAS_BLOCKING
    static const bool is_lockfree = false;
#else
    static const bool is_lockfree = true;
#endif
};
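
/* detail::atomic_cas selects, at compile time, the CAS implementation that
 * matches sizeof(C) via an mpl::map keyed on the byte size (4, 8 or 16);
 * any other size falls back to the mutex-based atomic_cas_emulator. */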
namespace detail
{

using namespace boost::mpl;

template<typename C>
struct atomic_cas
{
private:
    typedef map3<pair<long_<4>,  atomic_cas32>,
                 pair<long_<8>,  atomic_cas64>,
                 pair<long_<16>, atomic_cas128>
                 > cas_map;

    typedef typename at<cas_map, long_<sizeof(C)> >::type atomic_cas_t;

    typedef typename if_<has_key<cas_map, long_<sizeof(C)> >,
                         atomic_cas_t,
                         atomic_cas_emulator<C> >::type cas_t;

    typedef typename cas_t::cas_type cas_value_t;

public:
    static inline bool cas(volatile C * addr, C const & old, C const & nw)
    {
        return cas_t::cas((volatile cas_value_t*)addr,
                          *(cas_value_t*)&old,
                          *(cas_value_t*)&nw);
    }

    static const bool is_lockfree = cas_t::is_lockfree;
};

} /* namespace detail */

using detail::atomic_cas;
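
/* Convenience front end: cas() forwards to atomic_cas<C> and performs a
 * strong compare-and-swap on *addr, returning true iff the exchange
 * happened. */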
template <typename C>
inline bool cas(volatile C * addr, C const & old, C const & nw)
{
    return atomic_cas<C>::cas(addr, old, nw);
}
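
/* Usage sketch (illustrative only, not part of the library interface):
 *
 *     boost::uint32_t value = 1;
 *     bool swapped = boost::lockfree::cas(&value, boost::uint32_t(1),
 *                                                 boost::uint32_t(2));
 *     // swapped == true and value == 2; a second identical call returns
 *     // false because *addr no longer equals the expected value.
 */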

} /* namespace lockfree */
} /* namespace boost */

#endif /* BOOST_LOCKFREE_CAS_HPP_INCLUDED */