// This commit was manufactured by cvs2svn to create branch
// [official-gcc.git] / libstdc++-v3 / include / ext / bitmap_allocator.h
// blob 71b278b30dc4d638d25b344f055b98f027e9e254
1 // Bitmapped Allocator. -*- C++ -*-
3 // Copyright (C) 2004 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
9 // any later version.
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING. If not, write to the Free
18 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
19 // USA.
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction. Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License. This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
32 #if !defined _BITMAP_ALLOCATOR_H
33 #define _BITMAP_ALLOCATOR_H 1
35 #include <cstddef>
36 //For std::size_t, and ptrdiff_t.
37 #include <utility>
38 //For std::pair.
39 #include <algorithm>
40 //std::find_if.
41 #include <vector>
42 //For the free list of exponentially growing memory blocks. At max,
43 //size of the vector should be not more than the number of bits in an
44 //integer or an unsigned integer.
45 #include <functional>
46 //For greater_equal, and less_equal.
47 #include <new>
48 //For operator new.
49 #include <bits/gthr.h>
50 //For __gthread_mutex_t, __gthread_mutex_lock and __gthread_mutex_unlock.
51 #include <ext/new_allocator.h>
52 //For __gnu_cxx::new_allocator for std::vector.
54 #include <cassert>
55 #define NDEBUG
57 //#define CHECK_FOR_ERRORS
59 namespace __gnu_cxx
62 class _Mutex {
63 __gthread_mutex_t _M_mut;
64 //Prevent Copying and assignment.
65 _Mutex (_Mutex const&);
66 _Mutex& operator= (_Mutex const&);
67 public:
68 _Mutex ()
70 #if !defined __GTHREAD_MUTEX_INIT
71 __GTHREAD_MUTEX_INIT_FUNCTION(&_M_mut);
72 #else
73 __gthread_mutex_t __mtemp = __GTHREAD_MUTEX_INIT;
74 _M_mut = __mtemp;
75 #endif
77 ~_Mutex ()
79 //Gthreads does not define a Mutex Destruction Function.
81 __gthread_mutex_t *_M_get() { return &_M_mut; }
85 class _Lock {
86 _Mutex& _M_mt;
87 //Prevent Copying and assignment.
88 _Lock (_Lock const&);
89 _Lock& operator= (_Lock const&);
90 public:
91 _Lock (_Mutex& __mref) : _M_mt(__mref)
93 __gthread_mutex_lock(_M_mt._M_get());
95 ~_Lock () { __gthread_mutex_unlock(_M_mt._M_get()); }
98 namespace __aux_balloc {
100 static const unsigned int _Bits_Per_Byte = 8;
101 static const unsigned int _Bits_Per_Block = sizeof(unsigned int) * _Bits_Per_Byte;
103 template <typename _Addr_Pair_t>
104 inline size_t __balloc_num_blocks (_Addr_Pair_t __ap)
106 return (__ap.second - __ap.first) + 1;
109 template <typename _Addr_Pair_t>
110 inline size_t __balloc_num_bit_maps (_Addr_Pair_t __ap)
112 return __balloc_num_blocks(__ap) / _Bits_Per_Block;
115 //T should be a pointer type.
116 template <typename _Tp>
117 class _Inclusive_between : public std::unary_function<typename std::pair<_Tp, _Tp>, bool> {
118 typedef _Tp pointer;
119 pointer _M_ptr_value;
120 typedef typename std::pair<_Tp, _Tp> _Block_pair;
122 public:
123 _Inclusive_between (pointer __ptr) : _M_ptr_value(__ptr) { }
124 bool operator () (_Block_pair __bp) const throw ()
126 if (std::less_equal<pointer> ()(_M_ptr_value, __bp.second) &&
127 std::greater_equal<pointer> ()(_M_ptr_value, __bp.first))
128 return true;
129 else
130 return false;
134 //Used to pass a Functor to functions by reference.
135 template <typename _Functor>
136 class _Functor_Ref :
137 public std::unary_function<typename _Functor::argument_type, typename _Functor::result_type> {
138 _Functor& _M_fref;
140 public:
141 typedef typename _Functor::argument_type argument_type;
142 typedef typename _Functor::result_type result_type;
144 _Functor_Ref (_Functor& __fref) : _M_fref(__fref) { }
145 result_type operator() (argument_type __arg) { return _M_fref (__arg); }
149 //T should be a pointer type, and A is the Allocator for the vector.
150 template <typename _Tp, typename _Alloc>
151 class _Ffit_finder : public std::unary_function<typename std::pair<_Tp, _Tp>, bool> {
152 typedef typename std::vector<std::pair<_Tp, _Tp>, _Alloc> _BPVector;
153 typedef typename _BPVector::difference_type _Counter_type;
154 typedef typename std::pair<_Tp, _Tp> _Block_pair;
156 unsigned int *_M_pbitmap;
157 unsigned int _M_data_offset;
159 public:
160 _Ffit_finder () : _M_pbitmap (0), _M_data_offset (0) { }
162 bool operator() (_Block_pair __bp) throw()
164 //Set the _rover to the last unsigned integer, which is the
165 //bitmap to the first free block. Thus, the bitmaps are in exact
166 //reverse order of the actual memory layout. So, we count down
167 //the bimaps, which is the same as moving up the memory.
169 //If the used count stored at the start of the Bit Map headers
170 //is equal to the number of Objects that the current Block can
171 //store, then there is definitely no space for another single
172 //object, so just return false.
173 _Counter_type __diff = __gnu_cxx::__aux_balloc::__balloc_num_bit_maps (__bp);
175 assert (*(reinterpret_cast<unsigned int*>(__bp.first) - (__diff + 1)) <=
176 __gnu_cxx::__aux_balloc::__balloc_num_blocks (__bp));
178 if (*(reinterpret_cast<unsigned int*>(__bp.first) - (__diff + 1)) ==
179 __gnu_cxx::__aux_balloc::__balloc_num_blocks (__bp))
180 return false;
182 unsigned int *__rover = reinterpret_cast<unsigned int*>(__bp.first) - 1;
183 for (_Counter_type __i = 0; __i < __diff; ++__i)
185 _M_data_offset = __i;
186 if (*__rover)
188 _M_pbitmap = __rover;
189 return true;
191 --__rover;
193 return false;
196 unsigned int *_M_get () { return _M_pbitmap; }
197 unsigned int _M_offset () { return _M_data_offset * _Bits_Per_Block; }
200 //T should be a pointer type.
201 template <typename _Tp, typename _Alloc>
202 class _Bit_map_counter {
204 typedef typename std::vector<std::pair<_Tp, _Tp>, _Alloc> _BPVector;
205 typedef typename _BPVector::size_type _Index_type;
206 typedef _Tp pointer;
208 _BPVector& _M_vbp;
209 unsigned int *_M_curr_bmap;
210 unsigned int *_M_last_bmap_in_block;
211 _Index_type _M_curr_index;
213 public:
214 //Use the 2nd parameter with care. Make sure that such an entry
215 //exists in the vector before passing that particular index to
216 //this ctor.
217 _Bit_map_counter (_BPVector& Rvbp, int __index = -1) : _M_vbp(Rvbp)
219 this->_M_reset(__index);
222 void _M_reset (int __index = -1) throw()
224 if (__index == -1)
226 _M_curr_bmap = 0;
227 _M_curr_index = (_Index_type)-1;
228 return;
231 _M_curr_index = __index;
232 _M_curr_bmap = reinterpret_cast<unsigned int*>(_M_vbp[_M_curr_index].first) - 1;
234 assert (__index <= (int)_M_vbp.size() - 1);
236 _M_last_bmap_in_block = _M_curr_bmap -
237 ((_M_vbp[_M_curr_index].second - _M_vbp[_M_curr_index].first + 1) / _Bits_Per_Block - 1);
240 //Dangerous Function! Use with extreme care. Pass to this
241 //functions ONLY those values that are known to be correct,
242 //otherwise this will mess up big time.
243 void _M_set_internal_bit_map (unsigned int *__new_internal_marker) throw()
245 _M_curr_bmap = __new_internal_marker;
248 bool _M_finished () const throw()
250 return (_M_curr_bmap == 0);
253 _Bit_map_counter& operator++ () throw()
255 if (_M_curr_bmap == _M_last_bmap_in_block)
257 if (++_M_curr_index == _M_vbp.size())
259 _M_curr_bmap = 0;
261 else
263 this->_M_reset (_M_curr_index);
266 else
268 --_M_curr_bmap;
270 return *this;
273 unsigned int *_M_get ()
275 return _M_curr_bmap;
278 pointer base () { return _M_vbp[_M_curr_index].first; }
279 unsigned int _M_offset ()
281 return _Bits_Per_Block * ((reinterpret_cast<unsigned int*>(this->base()) - _M_curr_bmap) - 1);
284 unsigned int _M_where () { return _M_curr_index; }
288 //Generic Version of the bsf instruction.
289 typedef unsigned int _Bit_map_type;
290 static inline unsigned int _Bit_scan_forward (_Bit_map_type __num)
292 unsigned int __ret_val = 0;
293 while (__num % 2 == 0)
295 ++__ret_val;
296 __num >>= 1;
298 return __ret_val;
301 struct _OOM_handler {
302 static std::new_handler _S_old_handler;
303 static bool _S_handled_oom;
304 typedef void (*_FL_clear_proc)(void);
305 static _FL_clear_proc _S_oom_fcp;
307 _OOM_handler (_FL_clear_proc __fcp)
309 _S_oom_fcp = __fcp;
310 _S_old_handler = std::set_new_handler (_S_handle_oom_proc);
311 _S_handled_oom = false;
314 static void _S_handle_oom_proc()
316 _S_oom_fcp();
317 std::set_new_handler (_S_old_handler);
318 _S_handled_oom = true;
321 ~_OOM_handler ()
323 if (!_S_handled_oom)
324 std::set_new_handler (_S_old_handler);
328 std::new_handler _OOM_handler::_S_old_handler;
329 bool _OOM_handler::_S_handled_oom = false;
330 _OOM_handler::_FL_clear_proc _OOM_handler::_S_oom_fcp = 0;
333 class _BA_free_list_store {
334 struct _LT_pointer_compare {
335 template <typename _Tp>
336 bool operator() (_Tp* __pt, _Tp const& __crt) const throw()
338 return *__pt < __crt;
342 #if defined __GTHREADS
343 static _Mutex _S_bfl_mutex;
344 #endif
345 static std::vector<unsigned int*> _S_free_list;
346 typedef std::vector<unsigned int*>::iterator _FLIter;
348 static void _S_validate_free_list(unsigned int *__addr) throw()
350 const unsigned int Max_Size = 64;
351 if (_S_free_list.size() >= Max_Size)
353 //Ok, the threshold value has been reached.
354 //We determine which block to remove from the list of free
355 //blocks.
356 if (*__addr >= *_S_free_list.back())
358 //Ok, the new block is greater than or equal to the last
359 //block in the list of free blocks. We just free the new
360 //block.
361 operator delete((void*)__addr);
362 return;
364 else
366 //Deallocate the last block in the list of free lists, and
367 //insert the new one in it's correct position.
368 operator delete((void*)_S_free_list.back());
369 _S_free_list.pop_back();
373 //Just add the block to the list of free lists
374 //unconditionally.
375 _FLIter __temp = std::lower_bound(_S_free_list.begin(), _S_free_list.end(),
376 *__addr, _LT_pointer_compare ());
377 //We may insert the new free list before _temp;
378 _S_free_list.insert(__temp, __addr);
381 static bool _S_should_i_give(unsigned int __block_size, unsigned int __required_size) throw()
383 const unsigned int Max_Wastage_Percentage = 36;
385 if (__block_size >= __required_size &&
386 (((__block_size - __required_size) * 100 / __block_size) < Max_Wastage_Percentage))
387 return true;
388 else
389 return false;
392 public:
393 typedef _BA_free_list_store _BFL_type;
395 static inline void _S_insert_free_list(unsigned int *__addr) throw()
397 #if defined __GTHREADS
398 _Lock __bfl_lock(*&_S_bfl_mutex);
399 #endif
400 //Call _S_validate_free_list to decide what should be done with this
401 //particular free list.
402 _S_validate_free_list(--__addr);
405 static unsigned int *_S_get_free_list(unsigned int __sz) throw (std::bad_alloc)
407 #if defined __GTHREADS
408 _Lock __bfl_lock(*&_S_bfl_mutex);
409 #endif
410 _FLIter __temp = std::lower_bound(_S_free_list.begin(), _S_free_list.end(),
411 __sz, _LT_pointer_compare());
412 if (__temp == _S_free_list.end() || !_S_should_i_give (**__temp, __sz))
414 _OOM_handler __set_handler(_BFL_type::_S_clear);
415 unsigned int *__ret_val = reinterpret_cast<unsigned int*>
416 (operator new (__sz + sizeof(unsigned int)));
417 *__ret_val = __sz;
418 return ++__ret_val;
420 else
422 unsigned int* __ret_val = *__temp;
423 _S_free_list.erase (__temp);
424 return ++__ret_val;
428 //This function just clears the internal Free List, and gives back
429 //all the memory to the OS.
430 static void _S_clear()
432 #if defined __GTHREADS
433 _Lock __bfl_lock(*&_S_bfl_mutex);
434 #endif
435 _FLIter __iter = _S_free_list.begin();
436 while (__iter != _S_free_list.end())
438 operator delete((void*)*__iter);
439 ++__iter;
441 _S_free_list.clear();
446 #if defined __GTHREADS
447 _Mutex _BA_free_list_store::_S_bfl_mutex;
448 #endif
449 std::vector<unsigned int*> _BA_free_list_store::_S_free_list;
451 template <class _Tp> class bitmap_allocator;
452 // specialize for void:
453 template <> class bitmap_allocator<void> {
454 public:
455 typedef void* pointer;
456 typedef const void* const_pointer;
457 // reference-to-void members are impossible.
458 typedef void value_type;
459 template <class U> struct rebind { typedef bitmap_allocator<U> other; };
462 template <class _Tp> class bitmap_allocator : private _BA_free_list_store {
463 public:
464 typedef size_t size_type;
465 typedef ptrdiff_t difference_type;
466 typedef _Tp* pointer;
467 typedef const _Tp* const_pointer;
468 typedef _Tp& reference;
469 typedef const _Tp& const_reference;
470 typedef _Tp value_type;
471 template <class U> struct rebind { typedef bitmap_allocator<U> other; };
473 private:
474 static const unsigned int _Bits_Per_Byte = 8;
475 static const unsigned int _Bits_Per_Block = sizeof(unsigned int) * _Bits_Per_Byte;
477 static inline void _S_bit_allocate(unsigned int *__pbmap, unsigned int __pos) throw()
479 unsigned int __mask = 1 << __pos;
480 __mask = ~__mask;
481 *__pbmap &= __mask;
484 static inline void _S_bit_free(unsigned int *__pbmap, unsigned int __Pos) throw()
486 unsigned int __mask = 1 << __Pos;
487 *__pbmap |= __mask;
490 static inline void *_S_memory_get(size_t __sz) throw (std::bad_alloc)
492 return operator new(__sz);
495 static inline void _S_memory_put(void *__vptr) throw ()
497 operator delete(__vptr);
500 typedef typename std::pair<pointer, pointer> _Block_pair;
501 typedef typename __gnu_cxx::new_allocator<_Block_pair> _BPVec_allocator_type;
502 typedef typename std::vector<_Block_pair, _BPVec_allocator_type> _BPVector;
505 #if defined CHECK_FOR_ERRORS
506 //Complexity: O(lg(N)). Where, N is the number of block of size
507 //sizeof(value_type).
508 static void _S_check_for_free_blocks() throw()
510 typedef typename __gnu_cxx::__aux_balloc::_Ffit_finder<pointer, _BPVec_allocator_type> _FFF;
511 _FFF __fff;
512 typedef typename _BPVector::iterator _BPiter;
513 _BPiter __bpi = std::find_if(_S_mem_blocks.begin(), _S_mem_blocks.end(),
514 __gnu_cxx::__aux_balloc::_Functor_Ref<_FFF>(__fff));
515 assert(__bpi == _S_mem_blocks.end());
517 #endif
520 //Complexity: O(1), but internally depends upon the complexity of
521 //the function _BA_free_list_store::_S_get_free_list. The part
522 //where the bitmap headers are written is of worst case complexity:
523 //O(X),where X is the number of blocks of size sizeof(value_type)
524 //within the newly acquired block. Having a tight bound.
525 static void _S_refill_pool() throw (std::bad_alloc)
527 #if defined CHECK_FOR_ERRORS
528 _S_check_for_free_blocks();
529 #endif
531 const unsigned int __num_bit_maps = _S_block_size / _Bits_Per_Block;
532 const unsigned int __size_to_allocate = sizeof(unsigned int) +
533 _S_block_size * sizeof(value_type) + __num_bit_maps*sizeof(unsigned int);
535 unsigned int *__temp =
536 reinterpret_cast<unsigned int*>(_BA_free_list_store::_S_get_free_list(__size_to_allocate));
537 *__temp = 0;
538 ++__temp;
540 //The Header information goes at the Beginning of the Block.
541 _Block_pair __bp = std::make_pair(reinterpret_cast<pointer>(__temp + __num_bit_maps),
542 reinterpret_cast<pointer>(__temp + __num_bit_maps)
543 + _S_block_size - 1);
545 //Fill the Vector with this information.
546 _S_mem_blocks.push_back(__bp);
548 unsigned int __bit_mask = 0; //0 Indicates all Allocated.
549 __bit_mask = ~__bit_mask; //1 Indicates all Free.
551 for (unsigned int __i = 0; __i < __num_bit_maps; ++__i)
552 __temp[__i] = __bit_mask;
554 //On some implementations, operator new might throw bad_alloc, or
555 //malloc might fail if the size passed is too large, therefore, we
556 //limit the size passed to malloc or operator new.
557 _S_block_size *= 2;
560 static _BPVector _S_mem_blocks;
561 static unsigned int _S_block_size;
562 static __gnu_cxx::__aux_balloc::_Bit_map_counter<pointer, _BPVec_allocator_type> _S_last_request;
563 static typename _BPVector::size_type _S_last_dealloc_index;
564 #if defined __GTHREADS
565 static _Mutex _S_mut;
566 #endif
568 public:
569 bitmap_allocator() throw()
572 bitmap_allocator(const bitmap_allocator&) { }
574 template <typename _Tp1> bitmap_allocator(const bitmap_allocator<_Tp1>&) throw()
577 ~bitmap_allocator() throw()
580 //Complexity: Worst case complexity is O(N), but that is hardly ever
581 //hit. if and when this particular case is encountered, the next few
582 //cases are guaranteed to have a worst case complexity of O(1)!
583 //That's why this function performs very well on the average. you
584 //can consider this function to be having a complexity refrred to
585 //commonly as: Amortized Constant time.
586 static pointer _S_allocate_single_object()
588 #if defined __GTHREADS
589 _Lock _bit_lock(*&_S_mut);
590 #endif
591 //The algorithm is something like this: The last_requst variable
592 //points to the last accessed Bit Map. When such a condition
593 //occurs, we try to find a free block in the current bitmap, or
594 //succeeding bitmaps until the last bitmap is reached. If no free
595 //block turns up, we resort to First Fit method. But, again, the
596 //First Fit is used only upto the point where we started the
597 //previous linear search.
599 while (_S_last_request._M_finished() == false && (*(_S_last_request._M_get()) == 0))
601 _S_last_request.operator++();
604 if (_S_last_request._M_finished())
606 //Fall Back to First Fit algorithm.
607 typedef typename __gnu_cxx::__aux_balloc::_Ffit_finder<pointer, _BPVec_allocator_type> _FFF;
608 _FFF __fff;
609 typedef typename _BPVector::iterator _BPiter;
610 _BPiter __bpi = std::find_if(_S_mem_blocks.begin(), _S_mem_blocks.end(),
611 __gnu_cxx::__aux_balloc::_Functor_Ref<_FFF>(__fff));
613 if (__bpi != _S_mem_blocks.end())
615 //Search was successful. Ok, now mark the first bit from
616 //the right as 0, meaning Allocated. This bit is obtained
617 //by calling _M_get() on __fff.
618 unsigned int __nz_bit = _Bit_scan_forward(*__fff._M_get());
619 _S_bit_allocate(__fff._M_get(), __nz_bit);
621 _S_last_request._M_reset(__bpi - _S_mem_blocks.begin());
623 //Now, get the address of the bit we marked as allocated.
624 pointer __ret_val = __bpi->first + __fff._M_offset() + __nz_bit;
625 unsigned int *__puse_count = reinterpret_cast<unsigned int*>(__bpi->first) -
626 (__gnu_cxx::__aux_balloc::__balloc_num_bit_maps(*__bpi) + 1);
627 ++(*__puse_count);
628 return __ret_val;
630 else
632 //Search was unsuccessful. We Add more memory to the pool
633 //by calling _S_refill_pool().
634 _S_refill_pool();
636 //_M_Reset the _S_last_request structure to the first free
637 //block's bit map.
638 _S_last_request._M_reset(_S_mem_blocks.size() - 1);
640 //Now, mark that bit as allocated.
643 //_S_last_request holds a pointer to a valid bit map, that points
644 //to a free block in memory.
645 unsigned int __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
646 _S_bit_allocate(_S_last_request._M_get(), __nz_bit);
648 pointer __ret_val = _S_last_request.base() + _S_last_request._M_offset() + __nz_bit;
650 unsigned int *__puse_count = reinterpret_cast<unsigned int*>
651 (_S_mem_blocks[_S_last_request._M_where()].first) -
652 (__gnu_cxx::__aux_balloc::__balloc_num_bit_maps(_S_mem_blocks[_S_last_request._M_where()]) + 1);
653 ++(*__puse_count);
654 return __ret_val;
657 //Complexity: O(1), but internally the complexity depends upon the
658 //complexity of the function(s) _S_allocate_single_object and
659 //_S_memory_get.
660 pointer allocate(size_type __n)
662 if (__n == 1)
663 return _S_allocate_single_object();
664 else
665 return reinterpret_cast<pointer>(_S_memory_get(__n * sizeof(value_type)));
668 //Complexity: Worst case complexity is O(N) where N is the number of
669 //blocks of size sizeof(value_type) within the free lists that the
670 //allocator holds. However, this worst case is hit only when the
671 //user supplies a bogus argument to hint. If the hint argument is
672 //sensible, then the complexity drops to O(lg(N)), and in extreme
673 //cases, even drops to as low as O(1). So, if the user supplied
674 //argument is good, then this function performs very well.
675 pointer allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
677 return allocate(__n);
680 void deallocate(pointer __p, size_type __n) throw()
682 if (__n == 1)
683 _S_deallocate_single_object(__p);
684 else
685 _S_memory_put(__p);
688 //Complexity: O(lg(N)), but the worst case is hit quite often! I
689 //need to do something about this. I'll be able to work on it, only
690 //when I have some solid figures from a few real apps.
691 static void _S_deallocate_single_object(pointer __p) throw()
693 #if defined __GTHREADS
694 _Lock _bit_lock(*&_S_mut);
695 #endif
696 typedef typename _BPVector::iterator iterator;
697 typedef typename _BPVector::difference_type diff_type;
699 diff_type __diff;
700 int __displacement;
702 assert(_S_last_dealloc_index >= 0);
704 if (__gnu_cxx::__aux_balloc::_Inclusive_between<pointer>(__p)(_S_mem_blocks[_S_last_dealloc_index]))
706 assert(_S_last_dealloc_index <= _S_mem_blocks.size() - 1);
708 //Initial Assumption was correct!
709 __diff = _S_last_dealloc_index;
710 __displacement = __p - _S_mem_blocks[__diff].first;
712 else
714 iterator _iter = (std::find_if(_S_mem_blocks.begin(), _S_mem_blocks.end(),
715 __gnu_cxx::__aux_balloc::_Inclusive_between<pointer>(__p)));
716 assert(_iter != _S_mem_blocks.end());
718 __diff = _iter - _S_mem_blocks.begin();
719 __displacement = __p - _S_mem_blocks[__diff].first;
720 _S_last_dealloc_index = __diff;
723 //Get the position of the iterator that has been found.
724 const unsigned int __rotate = __displacement % _Bits_Per_Block;
725 unsigned int *__bit_mapC = reinterpret_cast<unsigned int*>(_S_mem_blocks[__diff].first) - 1;
726 __bit_mapC -= (__displacement / _Bits_Per_Block);
728 _S_bit_free(__bit_mapC, __rotate);
729 unsigned int *__puse_count = reinterpret_cast<unsigned int*>
730 (_S_mem_blocks[__diff].first) -
731 (__gnu_cxx::__aux_balloc::__balloc_num_bit_maps(_S_mem_blocks[__diff]) + 1);
733 assert(*__puse_count != 0);
735 --(*__puse_count);
737 if (!*__puse_count)
739 _S_block_size /= 2;
741 //We may safely remove this block.
742 _Block_pair __bp = _S_mem_blocks[__diff];
743 _S_insert_free_list(__puse_count);
744 _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);
746 //We reset the _S_last_request variable to reflect the erased
747 //block. We do this to pretect future requests after the last
748 //block has been removed from a particular memory Chunk,
749 //which in turn has been returned to the free list, and
750 //hence had been erased from the vector, so the size of the
751 //vector gets reduced by 1.
752 if ((diff_type)_S_last_request._M_where() >= __diff--)
754 _S_last_request._M_reset(__diff);
755 // assert(__diff >= 0);
758 //If the Index into the vector of the region of memory that
759 //might hold the next address that will be passed to
760 //deallocated may have been invalidated due to the above
761 //erase procedure being called on the vector, hence we try
762 //to restore this invariant too.
763 if (_S_last_dealloc_index >= _S_mem_blocks.size())
765 _S_last_dealloc_index =(__diff != -1 ? __diff : 0);
766 assert(_S_last_dealloc_index >= 0);
771 pointer address(reference r) const { return &r; }
772 const_pointer address(const_reference r) const { return &r; }
774 size_type max_size(void) const throw() { return (size_type()-1)/sizeof(value_type); }
776 void construct (pointer p, const_reference _data)
778 new (p) value_type (_data);
781 void destroy (pointer p)
783 p->~value_type();
788 template <typename _Tp>
789 typename bitmap_allocator<_Tp>::_BPVector bitmap_allocator<_Tp>::_S_mem_blocks;
791 template <typename _Tp>
792 unsigned int bitmap_allocator<_Tp>::_S_block_size = bitmap_allocator<_Tp>::_Bits_Per_Block;
794 template <typename _Tp>
795 typename __gnu_cxx::bitmap_allocator<_Tp>::_BPVector::size_type
796 bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;
798 template <typename _Tp>
799 __gnu_cxx::__aux_balloc::_Bit_map_counter
800 <typename bitmap_allocator<_Tp>::pointer, typename bitmap_allocator<_Tp>::_BPVec_allocator_type>
801 bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);
803 #if defined __GTHREADS
804 template <typename _Tp>
805 __gnu_cxx::_Mutex
806 bitmap_allocator<_Tp>::_S_mut;
807 #endif
809 template <typename _Tp1, typename _Tp2>
810 bool operator== (const bitmap_allocator<_Tp1>&, const bitmap_allocator<_Tp2>&) throw()
812 return true;
815 template <typename _Tp1, typename _Tp2>
816 bool operator!= (const bitmap_allocator<_Tp1>&, const bitmap_allocator<_Tp2>&) throw()
818 return false;
823 #endif //_BITMAP_ALLOCATOR_H