// boost.lockfree: workaround for missing memory_order_consume
// File: boost/lockfree/detail/freelist.hpp
// blob 9e715339e3f7f4764738a89d8ff74518b4661457
1 // lock-free freelist
2 //
3 // Copyright (C) 2008, 2009 Tim Blechmann
4 //
5 // Distributed under the Boost Software License, Version 1.0. (See
6 // accompanying file LICENSE_1_0.txt or copy at
7 // http://www.boost.org/LICENSE_1_0.txt)
9 // Disclaimer: Not a Boost library.
11 #ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
12 #define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
14 #include <boost/version.hpp>
15 #if (BOOST_VERSION < 104200) && !defined(memory_order_consume)
16 #define memory_order_consume (boost::memory_order)8
17 #endif
19 #include <boost/lockfree/detail/tagged_ptr.hpp>
21 #include <boost/atomic.hpp>
22 #include <boost/noncopyable.hpp>
24 #include <boost/mpl/map.hpp>
25 #include <boost/mpl/apply.hpp>
26 #include <boost/mpl/at.hpp>
27 #include <boost/type_traits/is_pod.hpp>
29 #include <algorithm> /* for std::min */
31 namespace boost
33 namespace lockfree
35 namespace detail
38 template <typename T, typename Alloc = std::allocator<T> >
39 class dummy_freelist:
40 private boost::noncopyable,
41 private Alloc
43 public:
44 T * allocate (void)
46 return Alloc::allocate(1);
49 void deallocate (T * n)
51 Alloc::deallocate(n, 1);
55 } /* namespace detail */
57 template <typename T, typename Alloc = std::allocator<T> >
58 class caching_freelist:
59 private detail::dummy_freelist<T, Alloc>
61 struct freelist_node
63 lockfree::tagged_ptr<freelist_node> next;
66 typedef lockfree::tagged_ptr<freelist_node> tagged_ptr;
68 public:
69 caching_freelist(void):
70 pool_(tagged_ptr(NULL, 0))
73 explicit caching_freelist(std::size_t initial_nodes):
74 pool_(tagged_ptr(NULL, 0))
76 for (std::size_t i = 0; i != initial_nodes; ++i)
78 T * node = detail::dummy_freelist<T, Alloc>::allocate();
79 deallocate(node);
83 ~caching_freelist(void)
85 free_memory_pool();
88 T * allocate (void)
90 for(;;)
92 tagged_ptr old_pool = pool_.load(memory_order_consume);
94 if (!old_pool.get_ptr())
95 return detail::dummy_freelist<T, Alloc>::allocate();
97 freelist_node * new_pool_ptr = old_pool->next.get_ptr();
98 tagged_ptr new_pool (new_pool_ptr, old_pool.get_tag() + 1);
100 if (pool_.compare_exchange_strong(old_pool, new_pool)) {
101 void * ptr = old_pool.get_ptr();
102 return reinterpret_cast<T*>(ptr);
107 void deallocate (T * n)
109 void * node = n;
110 for(;;)
112 tagged_ptr old_pool = pool_.load(memory_order_consume);
114 freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);
115 tagged_ptr new_pool (new_pool_ptr, old_pool.get_tag() + 1);
117 new_pool->next.set_ptr(old_pool.get_ptr());
119 if (pool_.compare_exchange_strong(old_pool, new_pool))
120 return;
124 private:
125 void free_memory_pool(void)
127 tagged_ptr current (pool_);
129 while (current)
131 void * n = current.get_ptr();
132 current = current->next;
133 detail::dummy_freelist<T, Alloc>::deallocate(reinterpret_cast<T*>(n));
137 atomic<tagged_ptr> pool_;
140 template <typename T, typename Alloc = std::allocator<T> >
141 class static_freelist:
142 private Alloc
144 struct freelist_node
146 lockfree::tagged_ptr<freelist_node> next;
149 typedef lockfree::tagged_ptr<freelist_node> tagged_ptr;
151 public:
152 explicit static_freelist(std::size_t max_nodes):
153 pool_(tagged_ptr(NULL, 0)), total_nodes(max_nodes)
155 chunks = Alloc::allocate(max_nodes);
156 for (std::size_t i = 0; i != max_nodes; ++i)
158 T * node = chunks + i;
159 deallocate(node);
163 ~static_freelist(void)
165 Alloc::deallocate(chunks, total_nodes);
168 T * allocate (void)
170 for(;;)
172 tagged_ptr old_pool = pool_.load(memory_order_consume);
174 if (!old_pool.get_ptr())
175 return NULL; /* allocation fails */
177 freelist_node * new_pool_ptr = old_pool->next.get_ptr();
178 tagged_ptr new_pool (new_pool_ptr, old_pool.get_tag() + 1);
180 if (pool_.compare_exchange_strong(old_pool, new_pool)) {
181 void * ptr = old_pool.get_ptr();
182 return reinterpret_cast<T*>(ptr);
187 void deallocate (T * n)
189 void * node = n;
190 for(;;)
192 tagged_ptr old_pool = pool_.load(memory_order_consume);
194 freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);
195 tagged_ptr new_pool (new_pool_ptr, old_pool.get_tag());
197 new_pool->next.set_ptr(old_pool.get_ptr());
199 if (pool_.compare_exchange_strong(old_pool, new_pool))
200 return;
204 private:
205 atomic<tagged_ptr> pool_;
207 const std::size_t total_nodes;
208 T* chunks;
/** Policy tag selecting the caching (growable) freelist. */
struct caching_freelist_t {};

/** Policy tag selecting the static (fixed-capacity) freelist. */
struct static_freelist_t {};
namespace detail
{

#if 0
/* Compile-time mapping from a policy tag (caching_freelist_t /
 * static_freelist_t) to the corresponding freelist template,
 * rebound to the element type T.  Disabled (#if 0); kept for
 * reference only. */
template <typename T, typename Alloc, typename tag>
struct select_freelist
{
private:
    typedef typename Alloc::template rebind<T>::other Allocator;

    typedef typename boost::lockfree::caching_freelist<T, Allocator> cfl;
    typedef typename boost::lockfree::static_freelist<T, Allocator> sfl;

    typedef typename boost::mpl::map<
        boost::mpl::pair < caching_freelist_t, cfl >,
        boost::mpl::pair < static_freelist_t, sfl >
    > freelists;

public:
    typedef typename boost::mpl::at<freelists, tag>::type type;
};
#endif

} /* namespace detail */
239 } /* namespace lockfree */
240 } /* namespace boost */
242 #endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */