// Bitmap Allocator. -*- C++ -*-

// Copyright (C) 2004-2022 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file ext/bitmap_allocator.h
 *  This file is a GNU extension to the Standard C++ Library.
 */

#ifndef _BITMAP_ALLOCATOR_H
#define _BITMAP_ALLOCATOR_H 1

#include <bits/requires_hosted.h> // GNU extensions are currently omitted

#include <utility> // For std::pair.
#include <bits/functexcept.h> // For __throw_bad_alloc().
#include <bits/stl_function.h> // For greater_equal, and less_equal.
#include <new> // For operator new.
#include <debug/debug.h> // _GLIBCXX_DEBUG_ASSERT
#include <ext/concurrence.h>
#include <bits/move.h>

/** @brief The constant in the expression below is the alignment
 *  required in bytes.
 */
#define _BALLOC_ALIGN_BYTES 8
namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  namespace __detail
  {
    /** @class  __mini_vector bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  __mini_vector<> is a stripped down version of the
     *  full-fledged std::vector<>.
     *
     *  It is to be used only for built-in types or PODs. Notable
     *  differences are:
     *
     *  1. Not all accessor functions are present.
     *  2. Used ONLY for PODs.
     *  3. No Allocator template argument. Uses ::operator new() to get
     *  memory, and ::operator delete() to free it.
     *  Caveat: The dtor does NOT free the memory allocated, so this is a
     *  memory-leaking vector!
     */
    template<typename _Tp>
      class __mini_vector
      {
	__mini_vector(const __mini_vector&);
	__mini_vector& operator=(const __mini_vector&);

      public:
	typedef _Tp value_type;
	typedef _Tp* pointer;
	typedef _Tp& reference;
	typedef const _Tp& const_reference;
	typedef std::size_t size_type;
	typedef std::ptrdiff_t difference_type;
	typedef pointer iterator;

      private:
	pointer _M_start;
	pointer _M_finish;
	pointer _M_end_of_storage;

	size_type
	_M_space_left() const throw()
	{ return _M_end_of_storage - _M_finish; }

	_GLIBCXX_NODISCARD pointer
	allocate(size_type __n)
	{ return static_cast<pointer>(::operator new(__n * sizeof(_Tp))); }

	void
	deallocate(pointer __p, size_type)
	{ ::operator delete(__p); }

      public:
	// Members used: size(), push_back(), pop_back(),
	// insert(iterator, const_reference), erase(iterator),
	// begin(), end(), back(), operator[].

	__mini_vector()
	: _M_start(0), _M_finish(0), _M_end_of_storage(0) { }

	size_type
	size() const throw()
	{ return _M_finish - _M_start; }

	iterator
	begin() const throw()
	{ return this->_M_start; }

	iterator
	end() const throw()
	{ return this->_M_finish; }

	reference
	back() const throw()
	{ return *(this->end() - 1); }

	reference
	operator[](const size_type __pos) const throw()
	{ return this->_M_start[__pos]; }

	void
	insert(iterator __pos, const_reference __x);

	void
	push_back(const_reference __x)
	{
	  if (this->_M_space_left())
	    {
	      *this->end() = __x;
	      ++this->_M_finish;
	    }
	  else
	    this->insert(this->end(), __x);
	}

	void
	pop_back() throw()
	{ --this->_M_finish; }

	void
	erase(iterator __pos) throw();

	void
	clear() throw()
	{ this->_M_finish = this->_M_start; }
      };
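    // A minimal usage sketch (illustration only, not part of the library).
    // __mini_vector is used internally with POD-like element types such as
    // std::pair<_Tp*, _Tp*> or std::size_t*.  Note the caveat above: the
    // destructor does not release the heap storage.
    //
    //   __gnu_cxx::__detail::__mini_vector<int> __v;  // hypothetical element type
    //   __v.push_back(1);          // grows storage on demand (doubling)
    //   __v.push_back(2);
    //   int __last = __v.back();   // 2
    //   __v.pop_back();            // size() == 1
    //   // The remaining storage is intentionally leaked at scope exit.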
    // Out of line function definitions.
    template<typename _Tp>
      void __mini_vector<_Tp>::
      insert(iterator __pos, const_reference __x)
      {
	if (this->_M_space_left())
	  {
	    size_type __to_move = this->_M_finish - __pos;
	    iterator __dest = this->end();
	    iterator __src = this->end() - 1;

	    ++this->_M_finish;
	    while (__to_move)
	      {
		*__dest = *__src;
		--__dest; --__src; --__to_move;
	      }
	    *__pos = __x;
	  }
	else
	  {
	    size_type __new_size = this->size() ? this->size() * 2 : 1;
	    iterator __new_start = this->allocate(__new_size);
	    iterator __first = this->begin();
	    iterator __start = __new_start;
	    while (__first != __pos)
	      {
		*__start = *__first;
		++__start; ++__first;
	      }
	    *__start = __x;
	    ++__start;
	    while (__first != this->end())
	      {
		*__start = *__first;
		++__start; ++__first;
	      }

	    // Dispose of the old storage before adopting the new one.
	    this->deallocate(this->_M_start, this->size());

	    this->_M_start = __new_start;
	    this->_M_finish = __start;
	    this->_M_end_of_storage = this->_M_start + __new_size;
	  }
      }
    template<typename _Tp>
      void __mini_vector<_Tp>::
      erase(iterator __pos) throw()
      {
	while (__pos + 1 != this->end())
	  {
	    *__pos = __pos[1];
	    ++__pos;
	  }
	--this->_M_finish;
      }
    template<typename _Tp>
      struct __mv_iter_traits
      {
	typedef typename _Tp::value_type value_type;
	typedef typename _Tp::difference_type difference_type;
      };

    template<typename _Tp>
      struct __mv_iter_traits<_Tp*>
      {
	typedef _Tp value_type;
	typedef std::ptrdiff_t difference_type;
      };
    enum
      {
	bits_per_byte = 8,
	bits_per_block = sizeof(std::size_t) * std::size_t(bits_per_byte)
      };
    template<typename _ForwardIterator, typename _Tp, typename _Compare>
      _ForwardIterator
      __lower_bound(_ForwardIterator __first, _ForwardIterator __last,
		    const _Tp& __val, _Compare __comp)
      {
	typedef typename __mv_iter_traits<_ForwardIterator>::difference_type
	  _DistanceType;

	_DistanceType __len = __last - __first;
	_DistanceType __half;
	_ForwardIterator __middle;

	while (__len > 0)
	  {
	    __half = __len >> 1;
	    __middle = __first;
	    __middle += __half;
	    if (__comp(*__middle, __val))
	      {
		__first = __middle;
		++__first;
		__len = __len - __half - 1;
	      }
	    else
	      __len = __half;
	  }
	return __first;
      }
    /** @brief The number of Blocks pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline std::size_t
      __num_blocks(_AddrPair __ap)
      { return (__ap.second - __ap.first) + 1; }

    /** @brief The number of Bit-maps pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline std::size_t
      __num_bitmaps(_AddrPair __ap)
      { return __num_blocks(__ap) / std::size_t(bits_per_block); }
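    // Worked example (illustration only): on a target where
    // sizeof(std::size_t) == 8, bits_per_block is 64.  For an address
    // pair describing 128 blocks (__ap.second - __ap.first == 127),
    // __num_blocks(__ap) returns 128 and __num_bitmaps(__ap) returns
    // 128 / 64 == 2, i.e. two size_t words of bitmap precede the blocks.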
    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Inclusive_between
      {
	typedef _Tp pointer;
	pointer _M_ptr_value;
	typedef typename std::pair<_Tp, _Tp> _Block_pair;

      public:
	_Inclusive_between(pointer __ptr) : _M_ptr_value(__ptr)
	{ }

	bool
	operator()(_Block_pair __bp) const throw()
	{
	  if (std::less_equal<pointer>()(_M_ptr_value, __bp.second)
	      && std::greater_equal<pointer>()(_M_ptr_value, __bp.first))
	    return true;
	  else
	    return false;
	}
      };
    // Used to pass a Functor to functions by reference.
    template<typename _Functor>
      class _Functor_Ref
      {
	_Functor& _M_fref;

      public:
	typedef typename _Functor::argument_type argument_type;
	typedef typename _Functor::result_type result_type;

	_Functor_Ref(_Functor& __fref) : _M_fref(__fref)
	{ }

	result_type
	operator()(argument_type __arg)
	{ return _M_fref(__arg); }
      };
    /** @class  _Ffit_finder bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The class which acts as a predicate for applying the
     *  first-fit memory allocation policy for the bitmap allocator.
     */
    // _Tp should be a pointer type, and _Alloc is the Allocator for
    // the vector.
    template<typename _Tp>
      class _Ffit_finder
      {
	typedef std::pair<_Tp, _Tp> _Block_pair;
	typedef __detail::__mini_vector<_Block_pair> _BPVector;
	typedef typename _BPVector::difference_type _Counter_type;

	std::size_t* _M_pbitmap;
	_Counter_type _M_data_offset;

      public:
	typedef bool result_type;
	typedef _Block_pair argument_type;

	_Ffit_finder() : _M_pbitmap(0), _M_data_offset(0)
	{ }

	bool
	operator()(_Block_pair __bp) throw()
	{
	  // Set the rover to the last physical location bitmap,
	  // which is the bitmap which belongs to the first free
	  // block. Thus, the bitmaps are in exact reverse order of
	  // the actual memory layout. So, we count down the bitmaps,
	  // which is the same as moving up the memory.

	  // If the used count stored at the start of the Bit Map headers
	  // is equal to the number of Objects that the current Block can
	  // store, then there is definitely no space for another single
	  // object, so just return false.
	  _Counter_type __diff = __detail::__num_bitmaps(__bp);

	  if (*(reinterpret_cast<size_t*>
		(__bp.first) - (__diff + 1)) == __detail::__num_blocks(__bp))
	    return false;

	  size_t* __rover = reinterpret_cast<size_t*>(__bp.first) - 1;

	  for (_Counter_type __i = 0; __i < __diff; ++__i)
	    {
	      _M_data_offset = __i;
	      if (*__rover)
		{
		  _M_pbitmap = __rover;
		  return true;
		}
	      --__rover;
	    }
	  return false;
	}

	std::size_t*
	_M_get() const throw()
	{ return _M_pbitmap; }

	_Counter_type
	_M_offset() const throw()
	{ return _M_data_offset * std::size_t(bits_per_block); }
      };
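    // Layout reminder (illustration only): a chunk recorded as a
    // _Block_pair __bp is laid out, from lower to higher addresses, as
    //
    //   [use count][bitmap]...[bitmap][block 0][block 1]...[block M-1]
    //
    // where reinterpret_cast<size_t*>(__bp.first) - 1 is the bitmap
    // covering blocks 0 .. bits_per_block-1 (so the bitmaps run in the
    // reverse order of the blocks they describe), and
    // reinterpret_cast<size_t*>(__bp.first) - (__num_bitmaps(__bp) + 1)
    // is the use count that operator() above inspects first.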
    /** @class  _Bitmap_counter bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The bitmap counter which acts as the bitmap
     *  manipulator, and manages the bit-manipulation functions and
     *  the searching and identification functions on the bit-map.
     */
    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Bitmap_counter
      {
	typedef typename
	__detail::__mini_vector<typename std::pair<_Tp, _Tp> > _BPVector;
	typedef typename _BPVector::size_type _Index_type;
	typedef _Tp pointer;

	_BPVector& _M_vbp;
	std::size_t* _M_curr_bmap;
	std::size_t* _M_last_bmap_in_block;
	_Index_type _M_curr_index;

      public:
	// Use the 2nd parameter with care. Make sure that such an
	// entry exists in the vector before passing that particular
	// index to this ctor.
	_Bitmap_counter(_BPVector& Rvbp, long __index = -1) : _M_vbp(Rvbp)
	{ this->_M_reset(__index); }

	void
	_M_reset(long __index = -1) throw()
	{
	  if (__index == -1)
	    {
	      _M_curr_bmap = 0;
	      _M_curr_index = static_cast<_Index_type>(-1);
	      return;
	    }

	  _M_curr_index = __index;
	  _M_curr_bmap = reinterpret_cast<std::size_t*>
	    (_M_vbp[_M_curr_index].first) - 1;

	  _GLIBCXX_DEBUG_ASSERT(__index <= (long)_M_vbp.size() - 1);

	  _M_last_bmap_in_block = _M_curr_bmap
	    - ((_M_vbp[_M_curr_index].second
		- _M_vbp[_M_curr_index].first + 1)
	       / std::size_t(bits_per_block) - 1);
	}

	// Dangerous Function! Use with extreme care. Pass to this
	// function ONLY those values that are known to be correct,
	// otherwise this will mess up big time.
	void
	_M_set_internal_bitmap(std::size_t* __new_internal_marker) throw()
	{ _M_curr_bmap = __new_internal_marker; }

	bool
	_M_finished() const throw()
	{ return (_M_curr_bmap == 0); }

	_Bitmap_counter&
	operator++() throw()
	{
	  if (_M_curr_bmap == _M_last_bmap_in_block)
	    {
	      if (++_M_curr_index == _M_vbp.size())
		_M_curr_bmap = 0;
	      else
		this->_M_reset(_M_curr_index);
	    }
	  else
	    --_M_curr_bmap;
	  return *this;
	}

	std::size_t*
	_M_get() const throw()
	{ return _M_curr_bmap; }

	pointer
	_M_base() const throw()
	{ return _M_vbp[_M_curr_index].first; }

	_Index_type
	_M_offset() const throw()
	{
	  return std::size_t(bits_per_block)
	    * ((reinterpret_cast<std::size_t*>(this->_M_base())
		- _M_curr_bmap) - 1);
	}

	_Index_type
	_M_where() const throw()
	{ return _M_curr_index; }
      };
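    // Traversal sketch (illustration only): starting at block index __i,
    // the counter visits that block's bitmaps from the one covering the
    // lowest-addressed objects down to _M_last_bmap_in_block, then moves
    // on to block __i + 1, and reports _M_finished() once the last bitmap
    // of the last block has been passed.  _M_offset() converts the current
    // bitmap back into an object offset within _M_base(), so callers can
    // turn a (bitmap, bit) pair into an address.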
    /** @brief  Mark a memory address as allocated by re-setting the
     *  corresponding bit in the bit-map.
     */
    inline void
    __bit_allocate(std::size_t* __pbmap, std::size_t __pos) throw()
    {
      std::size_t __mask = 1 << __pos;
      __mask = ~__mask;
      *__pbmap &= __mask;
    }

    /** @brief  Mark a memory address as free by setting the
     *  corresponding bit in the bit-map.
     */
    inline void
    __bit_free(std::size_t* __pbmap, std::size_t __pos) throw()
    {
      std::size_t __mask = 1 << __pos;
      *__pbmap |= __mask;
    }
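    // Worked example (illustration only): with __pos == 3 the mask is
    // 1 << 3 == 0b1000.  __bit_allocate clears that bit
    // (*__pbmap &= ~0b1000), i.e. a 0 bit means the slot is in use, while
    // __bit_free sets it again (*__pbmap |= 0b1000) to mark the slot free.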
  } // namespace __detail
  /** @brief  Generic Version of the bsf instruction.
   */
  inline std::size_t
  _Bit_scan_forward(std::size_t __num)
  { return static_cast<std::size_t>(__builtin_ctzl(__num)); }
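  // Worked example (illustration only): _Bit_scan_forward(0b10100) == 2,
  // the index of the least significant set bit, which is exactly the
  // first free slot recorded in a bitmap word.  __num must be non-zero,
  // since __builtin_ctzl is undefined for 0; the callers below guarantee
  // this by skipping all-zero bitmaps first.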
  /** @class  free_list bitmap_allocator.h bitmap_allocator.h
   *
   *  @brief  The free list class for managing chunks of memory to be
   *  given to and returned by the bitmap_allocator.
   */
  class free_list
  {
  public:
    typedef std::size_t*			value_type;
    typedef __detail::__mini_vector<value_type>	vector_type;
    typedef vector_type::iterator		iterator;
    typedef __mutex				__mutex_type;

  private:
    struct _LT_pointer_compare
    {
      bool
      operator()(const std::size_t* __pui,
		 const std::size_t __cui) const throw()
      { return *__pui < __cui; }
    };

#if defined __GTHREADS
    __mutex_type&
    _M_get_mutex()
    {
      static __mutex_type _S_mutex;
      return _S_mutex;
    }
#endif

    vector_type&
    _M_get_free_list()
    {
      static vector_type _S_free_list;
      return _S_free_list;
    }
    /** @brief  Performs validation of memory based on their size.
     *
     *  @param  __addr The pointer to the memory block to be
     *  validated.
     *
     *  Validates the memory block passed to this function and
     *  appropriately performs the action of managing the free list of
     *  blocks by adding this block to the free list or deleting this
     *  or larger blocks from the free list.
     */
    void
    _M_validate(std::size_t* __addr) throw()
    {
      vector_type& __free_list = _M_get_free_list();
      const vector_type::size_type __max_size = 64;
      if (__free_list.size() >= __max_size)
	{
	  // Ok, the threshold value has been reached.  We determine
	  // which block to remove from the list of free blocks.
	  if (*__addr >= *__free_list.back())
	    {
	      // Ok, the new block is greater than or equal to the
	      // last block in the list of free blocks. We just free
	      // the new block.
	      ::operator delete(static_cast<void*>(__addr));
	      return;
	    }
	  else
	    {
	      // Deallocate the last block in the list of free lists,
	      // and insert the new one in its correct position.
	      ::operator delete(static_cast<void*>(__free_list.back()));
	      __free_list.pop_back();
	    }
	}

      // Just add the block to the list of free lists unconditionally.
      iterator __temp = __detail::__lower_bound
	(__free_list.begin(), __free_list.end(),
	 *__addr, _LT_pointer_compare());

      // We may insert the new free list before __temp.
      __free_list.insert(__temp, __addr);
    }
    /** @brief  Decides whether the wastage of memory is acceptable for
     *  the current memory request and returns accordingly.
     *
     *  @param __block_size The size of the block available in the free
     *  list.
     *
     *  @param __required_size The required size of the memory block.
     *
     *  @return true if the wastage incurred is acceptable, else returns
     *  false.
     */
    bool
    _M_should_i_give(std::size_t __block_size,
		     std::size_t __required_size) throw()
    {
      const std::size_t __max_wastage_percentage = 36;
      if (__block_size >= __required_size &&
	  (((__block_size - __required_size) * 100 / __block_size)
	   < __max_wastage_percentage))
	return true;
      else
	return false;
    }
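    // Worked example (illustration only): for a free block of 1024 bytes
    // and a request for 768 bytes, the wastage is
    // (1024 - 768) * 100 / 1024 == 25, which is below the 36% threshold,
    // so the block is handed out.  A request for 512 bytes would waste
    // 50% of the block, so it would stay on the free list instead.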
  public:
    /** @brief This function returns the block of memory to the
     *  internal free list.
     *
     *  @param  __addr The pointer to the memory block that was given
     *  by a call to the _M_get function.
     */
    inline void
    _M_insert(std::size_t* __addr) throw()
    {
#if defined __GTHREADS
      __scoped_lock __bfl_lock(_M_get_mutex());
#endif
      // Call _M_validate to decide what should be done with
      // this particular free list.
      this->_M_validate(reinterpret_cast<std::size_t*>(__addr) - 1);
      // See discussion as to why this is 1!
    }
    /** @brief  This function gets a block of memory of the specified
     *  size from the free list.
     *
     *  @param  __sz The size in bytes of the memory required.
     *
     *  @return  A pointer to the new memory block of size at least
     *  equal to that requested.
     */
    std::size_t*
    _M_get(std::size_t __sz) _GLIBCXX_THROW(std::bad_alloc);

    /** @brief  This function just clears the internal Free List, and
     *  gives back all the memory to the OS.
     */
    void
    _M_clear();
  };
  // Forward declare the class.
  template<typename _Tp>
    class bitmap_allocator;

  // Specialize for void:
  template<>
    class bitmap_allocator<void>
    {
    public:
      typedef void*		pointer;
      typedef const void*	const_pointer;

      // Reference-to-void members are impossible.
      typedef void		value_type;

      template<typename _Tp1>
	struct rebind
	{
	  typedef bitmap_allocator<_Tp1> other;
	};
    };
  /**
   * @brief Bitmap Allocator, primary template.
   * @ingroup allocators
   */
  template<typename _Tp>
    class bitmap_allocator : private free_list
    {
    public:
      typedef std::size_t		size_type;
      typedef std::ptrdiff_t		difference_type;
      typedef _Tp*			pointer;
      typedef const _Tp*		const_pointer;
      typedef _Tp&			reference;
      typedef const _Tp&		const_reference;
      typedef _Tp			value_type;
      typedef free_list::__mutex_type	__mutex_type;

      template<typename _Tp1>
	struct rebind
	{
	  typedef bitmap_allocator<_Tp1> other;
	};

#if __cplusplus >= 201103L
      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 2103. propagate_on_container_move_assignment
      typedef std::true_type propagate_on_container_move_assignment;
#endif

    private:
      template<std::size_t _BSize, std::size_t _AlignSize>
	struct aligned_size
	{
	  enum
	    {
	      modulus = _BSize % _AlignSize,
	      value = _BSize + (modulus ? _AlignSize - (modulus) : 0)
	    };
	};

      struct _Alloc_block
      {
	char __M_unused[aligned_size<sizeof(value_type),
			_BALLOC_ALIGN_BYTES>::value];
      };
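      // Worked example (illustration only): with sizeof(value_type) == 12
      // and _BALLOC_ALIGN_BYTES == 8, modulus is 12 % 8 == 4 and value is
      // 12 + (8 - 4) == 16, so each _Alloc_block occupies 16 bytes and
      // every slot handed out stays 8-byte aligned.  A 6-byte value_type
      // would likewise be padded up to 8 bytes.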
      typedef typename std::pair<_Alloc_block*, _Alloc_block*> _Block_pair;

      typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
      typedef typename _BPVector::iterator _BPiter;

      template<typename _Predicate>
	static _BPiter
	_S_find(_Predicate __p)
	{
	  _BPiter __first = _S_mem_blocks.begin();
	  while (__first != _S_mem_blocks.end() && !__p(*__first))
	    ++__first;
	  return __first;
	}
#if defined _GLIBCXX_DEBUG
      // Complexity: O(lg(N)), where N is the number of blocks of size
      // sizeof(value_type).
      void
      _S_check_for_free_blocks() throw()
      {
	typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
	_BPiter __bpi = _S_find(_FFF());

	_GLIBCXX_DEBUG_ASSERT(__bpi == _S_mem_blocks.end());
      }
#endif
      /** @brief  Responsible for exponentially growing the internal
       *  memory pool.
       *
       *  @throw  std::bad_alloc. If memory cannot be allocated.
       *
       *  Complexity: O(1), but internally depends upon the
       *  complexity of the function free_list::_M_get. The part where
       *  the bitmap headers are written has complexity: O(X), where X
       *  is the number of blocks of size sizeof(value_type) within
       *  the newly acquired block, which is a tight bound.
       */
      void
      _S_refill_pool() _GLIBCXX_THROW(std::bad_alloc)
      {
	using std::size_t;
#if defined _GLIBCXX_DEBUG
	_S_check_for_free_blocks();
#endif

	const size_t __num_bitmaps = (_S_block_size
				      / size_t(__detail::bits_per_block));
	const size_t __size_to_allocate = sizeof(size_t)
	  + _S_block_size * sizeof(_Alloc_block)
	  + __num_bitmaps * sizeof(size_t);

	size_t* __temp =
	  reinterpret_cast<size_t*>(this->_M_get(__size_to_allocate));
	*__temp = 0;
	++__temp;

	// The Header information goes at the Beginning of the Block.
	_Block_pair __bp =
	  std::make_pair(reinterpret_cast<_Alloc_block*>
			 (__temp + __num_bitmaps),
			 reinterpret_cast<_Alloc_block*>
			 (__temp + __num_bitmaps)
			 + _S_block_size - 1);

	// Fill the Vector with this information.
	_S_mem_blocks.push_back(__bp);

	for (size_t __i = 0; __i < __num_bitmaps; ++__i)
	  __temp[__i] = ~static_cast<size_t>(0); // 1 Indicates all Free.

	_S_block_size *= 2;
      }
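      // Worked example (illustration only): on a 64-bit target with
      // _S_block_size == 128 and sizeof(_Alloc_block) == 8,
      // __num_bitmaps is 128 / 64 == 2 and __size_to_allocate is
      // 8 (use count) + 128 * 8 (objects) + 2 * 8 (bitmaps) == 1048 bytes.
      // The chunk is then laid out as [use count][2 bitmaps][128 blocks],
      // with both bitmap words set to all ones (every slot free).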
      static _BPVector _S_mem_blocks;
      static std::size_t _S_block_size;
      static __detail::_Bitmap_counter<_Alloc_block*> _S_last_request;
      static typename _BPVector::size_type _S_last_dealloc_index;
#if defined __GTHREADS
      static __mutex_type _S_mut;
#endif

    public:
      /** @brief  Allocates memory for a single object of size
       *  sizeof(_Tp).
       *
       *  @throw  std::bad_alloc. If memory cannot be allocated.
       *
       *  Complexity: Worst case complexity is O(N), but that
       *  is hardly ever hit. If and when this particular case is
       *  encountered, the next few cases are guaranteed to have a
       *  worst case complexity of O(1)!  That's why this function
       *  performs very well on average. You can consider this
       *  function to have a complexity referred to commonly as:
       *  Amortized Constant time.
       */
      pointer
      _M_allocate_single_object() _GLIBCXX_THROW(std::bad_alloc)
      {
	using std::size_t;
#if defined __GTHREADS
	__scoped_lock __bit_lock(_S_mut);
#endif

	// The algorithm is something like this: The last_request
	// variable points to the last accessed Bit Map. When such a
	// condition occurs, we try to find a free block in the
	// current bitmap, or succeeding bitmaps until the last bitmap
	// is reached. If no free block turns up, we resort to First
	// Fit method.

	// WARNING: Do not re-order the condition in the while
	// statement below, because it relies on C++'s short-circuit
	// evaluation. The return from _S_last_request->_M_get() will
	// NOT be dereferenceable if _S_last_request->_M_finished()
	// returns true. This would inevitably lead to a NULL pointer
	// dereference if tinkered with.
	while (_S_last_request._M_finished() == false
	       && (*(_S_last_request._M_get()) == 0))
	  _S_last_request.operator++();

	if (__builtin_expect(_S_last_request._M_finished() == true, false))
	  {
	    // Fall Back to First Fit algorithm.
	    typedef typename __detail::_Ffit_finder<_Alloc_block*> _FFF;
	    _FFF __fff;
	    _BPiter __bpi = _S_find(__detail::_Functor_Ref<_FFF>(__fff));

	    if (__bpi != _S_mem_blocks.end())
	      {
		// Search was successful. Ok, now mark the first bit from
		// the right as 0, meaning Allocated. This bit is obtained
		// by calling _M_get() on __fff.
		size_t __nz_bit = _Bit_scan_forward(*__fff._M_get());
		__detail::__bit_allocate(__fff._M_get(), __nz_bit);

		_S_last_request._M_reset(__bpi - _S_mem_blocks.begin());

		// Now, get the address of the bit we marked as allocated.
		pointer __ret = reinterpret_cast<pointer>
		  (__bpi->first + __fff._M_offset() + __nz_bit);
		size_t* __puse_count =
		  reinterpret_cast<size_t*>
		  (__bpi->first) - (__detail::__num_bitmaps(*__bpi) + 1);

		++(*__puse_count);
		return __ret;
	      }
	    else
	      {
		// Search was unsuccessful. We Add more memory to the
		// pool by calling _S_refill_pool().
		_S_refill_pool();

		// _M_Reset the _S_last_request structure to the first
		// free block's bit map.
		_S_last_request._M_reset(_S_mem_blocks.size() - 1);

		// Now, mark that bit as allocated.
	      }
	  }

	// _S_last_request holds a pointer to a valid bit map, that
	// points to a free block in memory.
	size_t __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
	__detail::__bit_allocate(_S_last_request._M_get(), __nz_bit);

	pointer __ret = reinterpret_cast<pointer>
	  (_S_last_request._M_base() + _S_last_request._M_offset() + __nz_bit);

	size_t* __puse_count = reinterpret_cast<size_t*>
	  (_S_mem_blocks[_S_last_request._M_where()].first)
	  - (__detail::
	     __num_bitmaps(_S_mem_blocks[_S_last_request._M_where()]) + 1);

	++(*__puse_count);
	return __ret;
      }
      /** @brief  Deallocates memory that belongs to a single object of
       *  size sizeof(_Tp).
       *
       *  Complexity: O(lg(N)), but the worst case is not hit
       *  often!  This is because containers usually deallocate memory
       *  close to each other and this case is handled in O(1) time by
       *  the deallocate function.
       */
      void
      _M_deallocate_single_object(pointer __p) throw()
      {
	using std::size_t;
#if defined __GTHREADS
	__scoped_lock __bit_lock(_S_mut);
#endif
	_Alloc_block* __real_p = reinterpret_cast<_Alloc_block*>(__p);

	typedef typename _BPVector::iterator _Iterator;
	typedef typename _BPVector::difference_type _Difference_type;

	_Difference_type __diff;
	long __displacement;

	_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);

	__detail::_Inclusive_between<_Alloc_block*> __ibt(__real_p);
	if (__ibt(_S_mem_blocks[_S_last_dealloc_index]))
	  {
	    _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index
				  <= _S_mem_blocks.size() - 1);

	    // Initial Assumption was correct!
	    __diff = _S_last_dealloc_index;
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	  }
	else
	  {
	    _Iterator _iter = _S_find(__ibt);

	    _GLIBCXX_DEBUG_ASSERT(_iter != _S_mem_blocks.end());

	    __diff = _iter - _S_mem_blocks.begin();
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	    _S_last_dealloc_index = __diff;
	  }

	// Get the position of the iterator that has been found.
	const size_t __rotate = (__displacement
				 % size_t(__detail::bits_per_block));
	size_t* __bitmapC =
	  reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first) - 1;
	__bitmapC -= (__displacement / size_t(__detail::bits_per_block));

	__detail::__bit_free(__bitmapC, __rotate);
	size_t* __puse_count = reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first)
	  - (__detail::__num_bitmaps(_S_mem_blocks[__diff]) + 1);

	_GLIBCXX_DEBUG_ASSERT(*__puse_count != 0);

	--(*__puse_count);

	if (__builtin_expect(*__puse_count == 0, false))
	  {
	    _S_block_size /= 2;

	    // We can safely remove this block.
	    // _Block_pair __bp = _S_mem_blocks[__diff];
	    this->_M_insert(__puse_count);
	    _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);

	    // Reset the _S_last_request variable to reflect the
	    // erased block. We do this to protect future requests
	    // after the last block has been removed from a particular
	    // memory Chunk, which in turn has been returned to the
	    // free list, and hence had been erased from the vector,
	    // so the size of the vector gets reduced by 1.
	    if ((_Difference_type)_S_last_request._M_where() >= __diff--)
	      _S_last_request._M_reset(__diff);

	    // The index into the vector of the region of memory that
	    // might hold the next address to be deallocated may have
	    // been invalidated by the erase call above, so we try to
	    // restore that invariant too.
	    if (_S_last_dealloc_index >= _S_mem_blocks.size())
	      {
		_S_last_dealloc_index = (__diff != -1 ? __diff : 0);
		_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
	      }
	  }
      }
    public:
      bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }

      bitmap_allocator(const bitmap_allocator&) _GLIBCXX_USE_NOEXCEPT
      { }

      template<typename _Tp1>
	bitmap_allocator(const bitmap_allocator<_Tp1>&) _GLIBCXX_USE_NOEXCEPT
	{ }

      ~bitmap_allocator() _GLIBCXX_USE_NOEXCEPT
      { }
      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n)
      {
	if (__n > this->max_size())
	  std::__throw_bad_alloc();

#if __cpp_aligned_new
	if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
	  {
	    const size_type __b = __n * sizeof(value_type);
	    std::align_val_t __al = std::align_val_t(alignof(value_type));
	    return static_cast<pointer>(::operator new(__b, __al));
	  }
#endif

	if (__builtin_expect(__n == 1, true))
	  return this->_M_allocate_single_object();
	else
	  {
	    const size_type __b = __n * sizeof(value_type);
	    return reinterpret_cast<pointer>(::operator new(__b));
	  }
      }

      _GLIBCXX_NODISCARD pointer
      allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
      { return allocate(__n); }
      void
      deallocate(pointer __p, size_type __n) throw()
      {
	if (__builtin_expect(__p != 0, true))
	  {
#if __cpp_aligned_new
	    // Types with extended alignment are handled by operator delete.
	    if (alignof(value_type) > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
	      {
		::operator delete(__p, std::align_val_t(alignof(value_type)));
		return;
	      }
#endif

	    if (__builtin_expect(__n == 1, true))
	      this->_M_deallocate_single_object(__p);
	    else
	      ::operator delete(__p);
	  }
      }
      pointer
      address(reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }

      const_pointer
      address(const_reference __r) const _GLIBCXX_NOEXCEPT
      { return std::__addressof(__r); }

      size_type
      max_size() const _GLIBCXX_USE_NOEXCEPT
      { return size_type(-1) / sizeof(value_type); }
#if __cplusplus >= 201103L
      template<typename _Up, typename... _Args>
	void
	construct(_Up* __p, _Args&&... __args)
	{ ::new((void *)__p) _Up(std::forward<_Args>(__args)...); }

      template<typename _Up>
	void
	destroy(_Up* __p)
	{ __p->~_Up(); }
#else
      void
      construct(pointer __p, const_reference __data)
      { ::new((void *)__p) value_type(__data); }

      void
      destroy(pointer __p)
      { __p->~value_type(); }
#endif
    };
  template<typename _Tp1, typename _Tp2>
    bool
    operator==(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return true; }

#if __cpp_impl_three_way_comparison < 201907L
  template<typename _Tp1, typename _Tp2>
    bool
    operator!=(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return false; }
#endif
  // Static member definitions.
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector
    bitmap_allocator<_Tp>::_S_mem_blocks;

  template<typename _Tp>
    std::size_t bitmap_allocator<_Tp>::_S_block_size
    = 2 * std::size_t(__detail::bits_per_block);

  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector::size_type
    bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;

  template<typename _Tp>
    __detail::_Bitmap_counter
    <typename bitmap_allocator<_Tp>::_Alloc_block*>
    bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);

#if defined __GTHREADS
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::__mutex_type
    bitmap_allocator<_Tp>::_S_mut;
#endif
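  // Usage sketch (illustration only, not part of the header): the
  // allocator plugs into any node-based standard container, e.g.
  //
  //   #include <list>
  //   #include <ext/bitmap_allocator.h>
  //
  //   int main()
  //   {
  //     std::list<int, __gnu_cxx::bitmap_allocator<int> > __l;
  //     for (int __i = 0; __i < 1000; ++__i)
  //       __l.push_back(__i);      // single-object allocations go through
  //                                // _M_allocate_single_object()
  //   }                            // deallocations return slots to the
  //                                // bitmaps and, eventually, whole chunks
  //                                // to the free_list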
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace __gnu_cxx

#endif // _BITMAP_ALLOCATOR_H