// Copyright (C) 2008, 2009, 2011 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Disclaimer: Not a Boost library.
#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <algorithm>            /* for std::min */
#include <memory>               /* for std::allocator */
#include <new>                  /* for placement new */

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>
#include <boost/mpl/apply.hpp>
#include <boost/mpl/at.hpp>
#include <boost/mpl/map.hpp>
#include <boost/noncopyable.hpp>
#include <boost/type_traits/is_pod.hpp>
35 tagged_ptr
<freelist_node
> next
;
39 bool allocate_may_allocate
,
40 typename Alloc
= std::allocator
<T
>
45 typedef tagged_ptr
<freelist_node
> tagged_node_ptr
;
48 freelist_stack (std::size_t n
= 0):
49 pool_(tagged_node_ptr(NULL
))
54 void reserve (std::size_t count
)
56 for (std::size_t i
= 0; i
!= count
; ++i
) {
57 T
* node
= Alloc::allocate(1);
62 void reserve_unsafe (std::size_t count
)
64 for (std::size_t i
= 0; i
!= count
; ++i
) {
65 T
* node
= Alloc::allocate(1);
66 deallocate_unsafe(node
);
72 T
* node
= allocate();
78 template <typename ArgumentType
>
79 T
* construct (ArgumentType
const & arg
)
81 T
* node
= allocate();
87 T
* construct_unsafe (void)
89 T
* node
= allocate_unsafe();
95 template <typename ArgumentType
>
96 T
* construct_unsafe (ArgumentType
const & arg
)
98 T
* node
= allocate_unsafe();
105 void destruct (T
* n
)
111 void destruct_unsafe (T
* n
)
114 deallocate_unsafe(n
);
119 tagged_node_ptr old_pool
= pool_
.load(memory_order_consume
);
122 if (!old_pool
.get_ptr()) {
123 if (allocate_may_allocate
)
124 return Alloc::allocate(1);
129 freelist_node
* new_pool_ptr
= old_pool
->next
.get_ptr();
130 tagged_node_ptr
new_pool (new_pool_ptr
, old_pool
.get_tag() + 1);
132 if (pool_
.compare_exchange_weak(old_pool
, new_pool
)) {
133 void * ptr
= old_pool
.get_ptr();
134 return reinterpret_cast<T
*>(ptr
);
139 T
* allocate_unsafe (void)
141 tagged_node_ptr old_pool
= pool_
.load(memory_order_relaxed
);
143 if (!old_pool
.get_ptr()) {
144 if (allocate_may_allocate
)
145 return Alloc::allocate(1);
150 freelist_node
* new_pool_ptr
= old_pool
->next
.get_ptr();
151 tagged_node_ptr
new_pool (new_pool_ptr
, old_pool
.get_tag() + 1);
153 pool_
.store(new_pool
, memory_order_relaxed
);
154 void * ptr
= old_pool
.get_ptr();
155 return reinterpret_cast<T
*>(ptr
);
158 void deallocate (T
* n
)
161 tagged_node_ptr old_pool
= pool_
.load(memory_order_consume
);
162 freelist_node
* new_pool_ptr
= reinterpret_cast<freelist_node
*>(node
);
165 tagged_node_ptr
new_pool (new_pool_ptr
, old_pool
.get_tag());
166 new_pool
->next
.set_ptr(old_pool
.get_ptr());
168 if (pool_
.compare_exchange_weak(old_pool
, new_pool
))
173 void deallocate_unsafe (T
* n
)
176 tagged_node_ptr old_pool
= pool_
.load(memory_order_relaxed
);
177 freelist_node
* new_pool_ptr
= reinterpret_cast<freelist_node
*>(node
);
179 tagged_node_ptr
new_pool (new_pool_ptr
, old_pool
.get_tag());
180 new_pool
->next
.set_ptr(old_pool
.get_ptr());
182 pool_
.store(new_pool
, memory_order_relaxed
);
185 ~freelist_stack(void)
187 tagged_node_ptr
current (pool_
);
190 freelist_node
* current_ptr
= current
.get_ptr();
192 current
= current_ptr
->next
;
193 Alloc::deallocate((T
*)current_ptr
, 1);
197 bool is_lock_free(void) const
199 return pool_
.is_lock_free();
203 atomic
<tagged_node_ptr
> pool_
;
207 } /* namespace detail */
// Policy tag -- presumably selects the caching freelist variant; the
// consumer of this tag is not visible in this chunk.
struct caching_freelist_t {};
// Policy tag -- presumably selects the statically-sized freelist variant;
// the consumer of this tag is not visible in this chunk.
struct static_freelist_t {};
215 } /* namespace lockfree */
216 } /* namespace boost */
218 #endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */