// Allocators -*- C++ -*-

// Copyright (C) 2001, 2002 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 2, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING.  If not, write to the Free
// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
// USA.

// As a special exception, you may use this file as part of a free software
// library without restriction.  Specifically, if other files instantiate
// templates or use macros or inline functions from this file, or you compile
// this file and link it with other files to produce an executable, this
// file does not by itself cause the resulting executable to be covered by
// the GNU General Public License.  This exception does not however
// invalidate any other reasons why the executable file might be covered by
// the GNU General Public License.

/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/** @file stl_alloc.h
 *  This is an internal header file, included by other library headers.
 *  You should not attempt to use it directly.
 */

#ifndef __GLIBCPP_INTERNAL_ALLOC_H
#define __GLIBCPP_INTERNAL_ALLOC_H

/**
 *  @defgroup Allocators Memory Allocators
 *  @if maint
 *  stl_alloc.h implements some node allocators.  These are NOT the same as
 *  allocators in the C++ standard, nor in the original H-P STL.  They do not
 *  encapsulate different pointer types; we assume that there is only one
 *  pointer type.  The C++ standard allocators are intended to allocate
 *  individual objects, not pools or arenas.
 *
 *  In this file allocators are of two different styles:  "standard" and
 *  "SGI" (quotes included).  "Standard" allocators conform to 20.4.  "SGI"
 *  allocators differ in AT LEAST the following ways (add to this list as you
 *  discover them):
 *
 *  - "Standard" allocate() takes two parameters (n_count,hint=0) but "SGI"
 *    allocate() takes one parameter (n_size).
 *  - Likewise, "standard" deallocate()'s argument is a count, but in "SGI"
 *    is a byte size.
 *  - max_size(), construct(), and destroy() are missing in "SGI" allocators.
 *  - reallocate(p,oldsz,newsz) is added in "SGI", and behaves as
 *    if p=realloc(p,newsz).
 *
 *  "SGI" allocators may be wrapped in __allocator to convert the interface
 *  into a "standard" one.
 *  @endif
 *
 *  @note The @c reallocate member functions have been deprecated for 3.2
 *        and will be removed in 3.4.  You must define @c _GLIBCPP_DEPRECATED
 *        to make this visible in 3.2; see c++config.h.
 *
 *  The canonical description of these classes is in docs/html/ext/howto.html
 *  or online at http://gcc.gnu.org/onlinedocs/libstdc++/ext/howto.html#3
 */
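
// An illustrative contrast of the two calling conventions described above
// (an explanatory sketch only, not part of the library interface; it relies
// on std::allocator and the __alloc typedef defined later in this file):
//
//   std::allocator<int> __std_a;                             // "standard", 20.4 style
//   int* __p1 = __std_a.allocate(10);                        // element count
//   __std_a.deallocate(__p1, 10);                            // element count again
//
//   void* __p2 = std::__alloc::allocate(10 * sizeof(int));   // "SGI", byte size
//   std::__alloc::deallocate(__p2, 10 * sizeof(int));        // byte size again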

#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <bits/functexcept.h>   // For __throw_bad_alloc
#include <bits/stl_threads.h>

#include <bits/atomicity.h>

namespace std
{
  /**
   *  @if maint
   *  A new-based allocator, as required by the standard.  Allocation and
   *  deallocation forward to global new and delete.  "SGI" style, minus
   *  reallocate().
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  class __new_alloc
  {
  public:
    static void*
    allocate(size_t __n)
    { return ::operator new(__n); }

    static void
    deallocate(void* __p, size_t)
    { ::operator delete(__p); }
  };

  /**
   *  @if maint
   *  A malloc-based allocator.  Typically slower than the
   *  __default_alloc_template (below).  Typically thread-safe and more
   *  storage efficient.  The template argument is unused and is only present
   *  to permit multiple instantiations (but see __default_alloc_template
   *  for caveats).  "SGI" style, plus __set_malloc_handler for OOM conditions.
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<int __inst>
    class __malloc_alloc_template
    {
    private:
      static void* _S_oom_malloc(size_t);
#ifdef _GLIBCPP_DEPRECATED
      static void* _S_oom_realloc(void*, size_t);
#endif
      static void (* __malloc_alloc_oom_handler)();

    public:
      static void*
      allocate(size_t __n)
      {
        void* __result = malloc(__n);
        if (__builtin_expect(__result == 0, 0))
          __result = _S_oom_malloc(__n);
        return __result;
      }

      static void
      deallocate(void* __p, size_t /* __n */)
      { free(__p); }

#ifdef _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
      {
        void* __result = realloc(__p, __new_sz);
        if (__builtin_expect(__result == 0, 0))
          __result = _S_oom_realloc(__p, __new_sz);
        return __result;
      }
#endif

      static void (* __set_malloc_handler(void (*__f)()))()
      {
        void (* __old)() = __malloc_alloc_oom_handler;
        __malloc_alloc_oom_handler = __f;
        return __old;
      }
    };

  // malloc_alloc out-of-memory handling
  template<int __inst>
    void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;

  template<int __inst>
    void*
    __malloc_alloc_template<__inst>::
    _S_oom_malloc(size_t __n)
    {
      void (* __my_malloc_handler)();
      void* __result;

      for (;;)
        {
          __my_malloc_handler = __malloc_alloc_oom_handler;
          if (__builtin_expect(__my_malloc_handler == 0, 0))
            __throw_bad_alloc();
          (*__my_malloc_handler)();
          __result = malloc(__n);
          if (__result)
            return __result;
        }
    }

#ifdef _GLIBCPP_DEPRECATED
  template<int __inst>
    void*
    __malloc_alloc_template<__inst>::
    _S_oom_realloc(void* __p, size_t __n)
    {
      void (* __my_malloc_handler)();
      void* __result;

      for (;;)
        {
          __my_malloc_handler = __malloc_alloc_oom_handler;
          if (__builtin_expect(__my_malloc_handler == 0, 0))
            __throw_bad_alloc();
          (*__my_malloc_handler)();
          __result = realloc(__p, __n);
          if (__result)
            return __result;
        }
    }
#endif
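
  // A hedged usage sketch (illustration only, not library code): installing
  // an out-of-memory handler for the malloc-based allocator.  On allocation
  // failure _S_oom_malloc() calls the handler and retries malloc() until it
  // succeeds; with no handler installed it calls __throw_bad_alloc().
  // __release_some_memory is a hypothetical application hook.
  //
  //   void __release_some_memory();   // hypothetical; frees caches, etc.
  //
  //   typedef __malloc_alloc_template<0> __malloc_alloc;
  //   void (*__prev)() = __malloc_alloc::__set_malloc_handler(&__release_some_memory);
  //   void* __p = __malloc_alloc::allocate(256);    // retries via the handler on failure
  //   __malloc_alloc::deallocate(__p, 256);
  //   __malloc_alloc::__set_malloc_handler(__prev); // restore the old handler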

  // Should not be referenced within the library anymore.
  typedef __new_alloc __mem_interface;

  /**
   *  @if maint
   *  This is used primarily (only?) in _Alloc_traits and other places to
   *  help provide the _Alloc_type typedef.  All it does is forward the
   *  requests after some minimal checking.
   *
   *  This is neither "standard"-conforming nor "SGI".  The _Alloc parameter
   *  must be "SGI" style.
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<typename _Tp, typename _Alloc>
    class __simple_alloc
    {
    public:
      static _Tp*
      allocate(size_t __n)
      {
        _Tp* __ret = 0;
        if (__n)
          __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
        return __ret;
      }

      static _Tp*
      allocate()
      { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }

      static void
      deallocate(_Tp* __p, size_t __n)
      { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }

      static void
      deallocate(_Tp* __p)
      { _Alloc::deallocate(__p, sizeof (_Tp)); }
    };
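
  // A hedged usage sketch (illustration only): __simple_alloc converts the
  // element counts it is given into the byte sizes its "SGI" style _Alloc
  // parameter expects (here using the __alloc typedef defined below).
  //
  //   typedef __simple_alloc<double, __alloc> _Double_alloc;
  //   double* __p = _Double_alloc::allocate(4);   // forwards 4 * sizeof(double) bytes
  //   _Double_alloc::deallocate(__p, 4);          // same conversion on the way back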

  /**
   *  @if maint
   *  An adaptor for an underlying allocator (_Alloc) to check the size
   *  arguments for debugging.  Errors are reported using assert; these
   *  checks can be disabled via NDEBUG, but the space penalty is still
   *  paid, therefore it is far better to just use the underlying allocator
   *  by itself when no checking is desired.
   *
   *  "There is some evidence that this can confuse Purify." - SGI comment
   *
   *  This adaptor is "SGI" style.  The _Alloc parameter must also be "SGI".
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<typename _Alloc>
    class __debug_alloc
    {
    private:
      // Size of space used to store size.  Note that this must be
      // large enough to preserve alignment.
      enum {_S_extra = 8};

    public:
      static void*
      allocate(size_t __n)
      {
        char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
        *(size_t*)__result = __n;
        return __result + (int) _S_extra;
      }

      static void
      deallocate(void* __p, size_t __n)
      {
        char* __real_p = (char*)__p - (int) _S_extra;
        assert(*(size_t*)__real_p == __n);
        _Alloc::deallocate(__real_p, __n + (int) _S_extra);
      }

#ifdef _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t __old_sz, size_t __new_sz)
      {
        char* __real_p = (char*)__p - (int) _S_extra;
        assert(*(size_t*)__real_p == __old_sz);
        char* __result = (char*) _Alloc::reallocate(__real_p,
                                                    __old_sz + (int) _S_extra,
                                                    __new_sz + (int) _S_extra);
        *(size_t*)__result = __new_sz;
        return __result + (int) _S_extra;
      }
#endif
    };
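
  // A hedged usage sketch (illustration only): __debug_alloc stores the
  // requested size in the _S_extra bytes in front of each block and
  // assert()s that deallocate() is passed the same size.
  //
  //   typedef __debug_alloc<__new_alloc> _Checked_alloc;
  //   void* __p = _Checked_alloc::allocate(100);   // really gets 100 + _S_extra bytes
  //   _Checked_alloc::deallocate(__p, 100);        // OK: sizes match
  //   // _Checked_alloc::deallocate(__p, 64);      // would trip the assert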

  /**
   *  @if maint
   *  Default node allocator.  "SGI" style.  Uses various allocators to
   *  fulfill underlying requests (and makes as few requests as possible
   *  when in default high-speed pool mode).
   *
   *  Important implementation properties:
   *  0. If globally mandated, then allocate objects from __new_alloc
   *  1. If the clients request an object of size > _MAX_BYTES, the resulting
   *     object will be obtained directly from __new_alloc
   *  2. In all other cases, we allocate an object of size exactly
   *     _S_round_up(requested_size).  Thus the client has enough size
   *     information that we can return the object to the proper free list
   *     without permanently losing part of the object.
   *
   *  The first template parameter specifies whether more than one thread may
   *  use this allocator.  It is safe to allocate an object from one instance
   *  of a default_alloc and deallocate it with another one.  This effectively
   *  transfers its ownership to the second one.  This may have undesirable
   *  effects on reference locality.
   *
   *  The second parameter is unused and serves only to allow the creation of
   *  multiple default_alloc instances.  Note that containers built on
   *  different allocator instances have different types, limiting the
   *  utility of this approach.  If you do not wish to share the free lists
   *  with the main default_alloc instance, instantiate this with a non-zero
   *  __inst.
   *
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<bool __threads, int __inst>
    class __default_alloc_template
    {
    private:
      enum {_ALIGN = 8};
      enum {_MAX_BYTES = 128};
      enum {_NFREELISTS = _MAX_BYTES / _ALIGN};

      union _Obj
      {
        union _Obj* _M_free_list_link;
        char        _M_client_data[1];    // The client sees this.
      };

      static _Obj* volatile  _S_free_list[_NFREELISTS];

      // Chunk allocation state.
      static char*           _S_start_free;
      static char*           _S_end_free;
      static size_t          _S_heap_size;

      static _STL_mutex_lock _S_node_allocator_lock;

      static size_t
      _S_round_up(size_t __bytes)
      { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }

      static size_t
      _S_freelist_index(size_t __bytes)
      { return (((__bytes) + (size_t)_ALIGN - 1)/(size_t)_ALIGN - 1); }
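
      // For example, with _ALIGN == 8: a request for 13 bytes is rounded up
      // to _S_round_up(13) == 16 and served from free list
      // _S_freelist_index(13) == 1, the list of 16-byte nodes; the lists
      // hold nodes of 8, 16, 24, ... up to _MAX_BYTES == 128 bytes.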

      // Returns an object of size __n, and optionally adds to size __n
      // free list.
      static void*
      _S_refill(size_t __n);

      // Allocates a chunk for nobjs of size size.  nobjs may be reduced
      // if it is inconvenient to allocate the requested number.
      static char*
      _S_chunk_alloc(size_t __size, int& __nobjs);

      // It would be nice to use _STL_auto_lock here.  But we need a
      // test whether threads are in use.
      struct _Lock
      {
        _Lock() { if (__threads) _S_node_allocator_lock._M_acquire_lock(); }
        ~_Lock() { if (__threads) _S_node_allocator_lock._M_release_lock(); }
      } __attribute__ ((__unused__));
      friend struct _Lock;

      static _Atomic_word _S_force_new;

    public:
      // __n must be > 0
      static void*
      allocate(size_t __n)
      {
        void* __ret = 0;

        // If there is a race through here, assume answer from getenv
        // will resolve in same direction.  Inspired by techniques
        // to efficiently support threading found in basic_string.h.
        if (_S_force_new == 0)
          {
            if (getenv("GLIBCPP_FORCE_NEW"))
              __atomic_add(&_S_force_new, 1);
            else
              __atomic_add(&_S_force_new, -1);
            // Trust but verify...
            assert(_S_force_new != 0);
          }

        if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
          __ret = __new_alloc::allocate(__n);
        else
          {
            _Obj* volatile* __my_free_list = _S_free_list
              + _S_freelist_index(__n);
            // Acquire the lock here with a constructor call.  This
            // ensures that it is released in exit or during stack
            // unwinding.
            _Lock __lock_instance;
            _Obj* __restrict__ __result = *__my_free_list;
            if (__builtin_expect(__result == 0, 0))
              __ret = _S_refill(_S_round_up(__n));
            else
              {
                *__my_free_list = __result -> _M_free_list_link;
                __ret = __result;
              }
            if (__builtin_expect(__ret == 0, 0))
              __throw_bad_alloc();
          }
        return __ret;
      }

      // __p may not be 0
      static void
      deallocate(void* __p, size_t __n)
      {
        if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
          __new_alloc::deallocate(__p, __n);
        else
          {
            _Obj* volatile* __my_free_list = _S_free_list
              + _S_freelist_index(__n);
            _Obj* __q = (_Obj*)__p;

            // Acquire the lock here with a constructor call.  This
            // ensures that it is released in exit or during stack
            // unwinding.
            _Lock __lock_instance;
            __q -> _M_free_list_link = *__my_free_list;
            *__my_free_list = __q;
          }
      }

#ifdef _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t __old_sz, size_t __new_sz);
#endif
    };

  template<bool __threads, int __inst> _Atomic_word
  __default_alloc_template<__threads, __inst>::_S_force_new = 0;

  template<bool __threads, int __inst>
    inline bool
    operator==(const __default_alloc_template<__threads,__inst>&,
               const __default_alloc_template<__threads,__inst>&)
    { return true; }

  template<bool __threads, int __inst>
    inline bool
    operator!=(const __default_alloc_template<__threads,__inst>&,
               const __default_alloc_template<__threads,__inst>&)
    { return false; }

  // We allocate memory in large chunks in order to avoid fragmenting the
  // heap too much.  We assume that __size is properly aligned.  We hold
  // the allocation lock.
  template<bool __threads, int __inst>
    char*
    __default_alloc_template<__threads, __inst>::
    _S_chunk_alloc(size_t __size, int& __nobjs)
    {
      char* __result;
      size_t __total_bytes = __size * __nobjs;
      size_t __bytes_left = _S_end_free - _S_start_free;

      if (__bytes_left >= __total_bytes)
        {
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else if (__bytes_left >= __size)
        {
          __nobjs = (int)(__bytes_left/__size);
          __total_bytes = __size * __nobjs;
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else
        {
          size_t __bytes_to_get =
            2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
          // Try to make use of the left-over piece.
          if (__bytes_left > 0)
            {
              _Obj* volatile* __my_free_list =
                _S_free_list + _S_freelist_index(__bytes_left);

              ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
              *__my_free_list = (_Obj*)_S_start_free;
            }
          _S_start_free = (char*) __new_alloc::allocate(__bytes_to_get);
          if (_S_start_free == 0)
            {
              size_t __i;
              _Obj* volatile* __my_free_list;
              _Obj* __p;
              // Try to make do with what we have.  That can't hurt.  We
              // do not try smaller requests, since that tends to result
              // in disaster on multi-process machines.
              __i = __size;
              for (; __i <= (size_t) _MAX_BYTES; __i += (size_t) _ALIGN)
                {
                  __my_free_list = _S_free_list + _S_freelist_index(__i);
                  __p = *__my_free_list;
                  if (__p != 0)
                    {
                      *__my_free_list = __p -> _M_free_list_link;
                      _S_start_free = (char*)__p;
                      _S_end_free = _S_start_free + __i;
                      return _S_chunk_alloc(__size, __nobjs);
                      // Any leftover piece will eventually make it to the
                      // right free list.
                    }
                }
              _S_end_free = 0;        // In case of exception.
              _S_start_free = (char*)__new_alloc::allocate(__bytes_to_get);
              // This should either throw an exception or remedy the situation.
              // Thus we assume it succeeded.
            }
          _S_heap_size += __bytes_to_get;
          _S_end_free = _S_start_free + __bytes_to_get;
          return _S_chunk_alloc(__size, __nobjs);
        }
    }
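
  // An illustrative walk-through (explanatory comment only): the first
  // refill of the 16-byte list calls _S_chunk_alloc(16, __nobjs = 20), so
  // __total_bytes == 320; with an empty pool (_S_heap_size == 0) it requests
  // __bytes_to_get == 2 * 320 + _S_round_up(0 >> 4) == 640 bytes from
  // __new_alloc, hands 320 of them back to _S_refill and keeps the rest as
  // the left-over pool for subsequent requests.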

  // Returns an object of size __n, and optionally adds to "size
  // __n"'s free list.  We assume that __n is properly aligned.  We
  // hold the allocation lock.
  template<bool __threads, int __inst>
    void*
    __default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
    {
      int __nobjs = 20;
      char* __chunk = _S_chunk_alloc(__n, __nobjs);
      _Obj* volatile* __my_free_list;
      _Obj* __result;
      _Obj* __current_obj;
      _Obj* __next_obj;
      int __i;

      if (1 == __nobjs)
        return __chunk;
      __my_free_list = _S_free_list + _S_freelist_index(__n);

      // Build free list in chunk.
      __result = (_Obj*)__chunk;
      *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
      for (__i = 1; ; __i++)
        {
          __current_obj = __next_obj;
          __next_obj = (_Obj*)((char*)__next_obj + __n);
          if (__nobjs - 1 == __i)
            {
              __current_obj -> _M_free_list_link = 0;
              break;
            }
          else
            __current_obj -> _M_free_list_link = __next_obj;
        }
      return __result;
    }

#ifdef _GLIBCPP_DEPRECATED
  template<bool threads, int inst>
    void*
    __default_alloc_template<threads, inst>::
    reallocate(void* __p, size_t __old_sz, size_t __new_sz)
    {
      void* __result;
      size_t __copy_sz;

      if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES)
        return(realloc(__p, __new_sz));
      if (_S_round_up(__old_sz) == _S_round_up(__new_sz))
        return(__p);
      __result = allocate(__new_sz);
      __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
      memcpy(__result, __p, __copy_sz);
      deallocate(__p, __old_sz);
      return __result;
    }
#endif

  template<bool __threads, int __inst>
    _STL_mutex_lock
    __default_alloc_template<__threads,__inst>::_S_node_allocator_lock
    __STL_MUTEX_INITIALIZER;

  template<bool __threads, int __inst>
    char* __default_alloc_template<__threads,__inst>::_S_start_free = 0;

  template<bool __threads, int __inst>
    char* __default_alloc_template<__threads,__inst>::_S_end_free = 0;

  template<bool __threads, int __inst>
    size_t __default_alloc_template<__threads,__inst>::_S_heap_size = 0;

  template<bool __threads, int __inst>
    typename __default_alloc_template<__threads,__inst>::_Obj* volatile
    __default_alloc_template<__threads,__inst>::_S_free_list[_NFREELISTS];

  typedef __default_alloc_template<true,0>    __alloc;
  typedef __default_alloc_template<false,0>   __single_client_alloc;
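
  // A hedged usage sketch (illustration only): through the __alloc typedef,
  // small requests (<= _MAX_BYTES) are carved from the free lists while
  // larger ones go straight to operator new.  Setting the GLIBCPP_FORCE_NEW
  // environment variable before the first allocation makes every request
  // bypass the pool, which helps external memory debuggers.
  //
  //   void* __small = __alloc::allocate(32);      // from the 32-byte free list
  //   void* __large = __alloc::allocate(4096);    // direct to operator new
  //   __alloc::deallocate(__small, 32);           // returned to the free list
  //   __alloc::deallocate(__large, 4096);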

  /**
   *  @brief  The "standard" allocator, as per [20.4].
   *
   *  The private _Alloc is "SGI" style.  (See comments at the top
   *  of stl_alloc.h.)
   *
   *  The underlying allocator behaves as follows.
   *  - __default_alloc_template is used via two typedefs
   *  - "__single_client_alloc" typedef does no locking for threads
   *  - "__alloc" typedef is threadsafe via the locks
   *  - __new_alloc is used for memory requests
   *
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<typename _Tp>
    class allocator
    {
      typedef __alloc _Alloc;          // The underlying allocator.
    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef allocator<_Tp1> other; };

      allocator() throw() {}
      allocator(const allocator&) throw() {}
      template<typename _Tp1>
        allocator(const allocator<_Tp1>&) throw() {}
      ~allocator() throw() {}

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // NB: __n is permitted to be 0.  The C++ standard says nothing
      // about what the return value is when __n == 0.
      _Tp*
      allocate(size_type __n, const void* = 0)
      {
        _Tp* __ret = 0;
        if (__n)
          {
            if (__n <= this->max_size())
              __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
            else
              __throw_bad_alloc();
          }
        return __ret;
      }

      // __p is not permitted to be a null pointer.
      void
      deallocate(pointer __p, size_type __n)
      { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

      size_type
      max_size() const throw() { return size_t(-1) / sizeof(_Tp); }

      void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
      void destroy(pointer __p) { __p->~_Tp(); }
    };
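
  // A hedged usage sketch (illustration only): the standard allocator works
  // in element counts and keeps allocation separate from construction.
  //
  //   allocator<int> __a;
  //   int* __p = __a.allocate(3);          // raw storage for 3 ints
  //   for (int __i = 0; __i < 3; ++__i)
  //     __a.construct(__p + __i, __i);     // placement-new each element
  //   for (int __i = 0; __i < 3; ++__i)
  //     __a.destroy(__p + __i);            // run the destructors
  //   __a.deallocate(__p, 3);              // count, not a byte size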

  template<>
    class allocator<void>
    {
    public:
      typedef size_t      size_type;
      typedef ptrdiff_t   difference_type;
      typedef void*       pointer;
      typedef const void* const_pointer;
      typedef void        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef allocator<_Tp1> other; };
    };

  template<typename _T1, typename _T2>
    inline bool
    operator==(const allocator<_T1>&, const allocator<_T2>&)
    { return true; }

  template<typename _T1, typename _T2>
    inline bool
    operator!=(const allocator<_T1>&, const allocator<_T2>&)
    { return false; }

  /**
   *  @if maint
   *  Allocator adaptor to turn an "SGI" style allocator (e.g.,
   *  __alloc, __malloc_alloc_template) into a "standard" conforming
   *  allocator.  Note that this adaptor does *not* assume that all
   *  objects of the underlying alloc class are identical, nor does it
   *  assume that all of the underlying alloc's member functions are
   *  static member functions.  Note, also, that __allocator<_Tp,
   *  __alloc> is essentially the same thing as allocator<_Tp>.
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  template<typename _Tp, typename _Alloc>
    struct __allocator
    {
      _Alloc __underlying_alloc;

      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __allocator<_Tp1, _Alloc> other; };

      __allocator() throw() {}
      __allocator(const __allocator& __a) throw()
      : __underlying_alloc(__a.__underlying_alloc) {}

      template<typename _Tp1>
        __allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
        : __underlying_alloc(__a.__underlying_alloc) {}

      ~__allocator() throw() {}

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // NB: __n is permitted to be 0.  The C++ standard says nothing
      // about what the return value is when __n == 0.
      _Tp*
      allocate(size_type __n, const void* = 0)
      {
        _Tp* __ret = 0;
        if (__n)
          __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
        return __ret;
      }

      // __p is not permitted to be a null pointer.
      void
      deallocate(pointer __p, size_type __n)
      { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

      size_type
      max_size() const throw() { return size_t(-1) / sizeof(_Tp); }

      void
      construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }
    };
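
  // A hedged usage sketch (illustration only): wrapping an "SGI" style
  // allocator in __allocator yields the "standard" count-based interface.
  //
  //   typedef __allocator<int, __malloc_alloc_template<0> > _Int_alloc;
  //   _Int_alloc __a;
  //   int* __p = __a.allocate(8);          // 8 ints; converted to bytes internally
  //   __a.construct(__p, 42);
  //   __a.destroy(__p);
  //   __a.deallocate(__p, 8);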

  template<typename _Alloc>
    struct __allocator<void, _Alloc>
    {
      typedef size_t      size_type;
      typedef ptrdiff_t   difference_type;
      typedef void*       pointer;
      typedef const void* const_pointer;
      typedef void        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __allocator<_Tp1, _Alloc> other; };
    };

  template<typename _Tp, typename _Alloc>
    inline bool
    operator==(const __allocator<_Tp,_Alloc>& __a1,
               const __allocator<_Tp,_Alloc>& __a2)
    { return __a1.__underlying_alloc == __a2.__underlying_alloc; }

  template<typename _Tp, typename _Alloc>
    inline bool
    operator!=(const __allocator<_Tp, _Alloc>& __a1,
               const __allocator<_Tp, _Alloc>& __a2)
    { return __a1.__underlying_alloc != __a2.__underlying_alloc; }

  //@{
  /** Comparison operators for all of the predefined SGI-style allocators.
   *  This ensures that __allocator<malloc_alloc> (for example) will work
   *  correctly.  As required, all allocators compare equal.
   */
  template<int inst>
    inline bool
    operator==(const __malloc_alloc_template<inst>&,
               const __malloc_alloc_template<inst>&)
    { return true; }

  template<int __inst>
    inline bool
    operator!=(const __malloc_alloc_template<__inst>&,
               const __malloc_alloc_template<__inst>&)
    { return false; }

  template<typename _Alloc>
    inline bool
    operator==(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
    { return true; }

  template<typename _Alloc>
    inline bool
    operator!=(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
    { return false; }
  //@}

  /**
   *  @if maint
   *  Another allocator adaptor:  _Alloc_traits.  This serves two purposes.
   *  First, make it possible to write containers that can use either "SGI"
   *  style allocators or "standard" allocators.  Second, provide a mechanism
   *  so that containers can query whether or not the allocator has distinct
   *  instances.  If not, the container can avoid wasting a word of memory to
   *  store an empty object.  For examples of use, see stl_vector.h, etc, or
   *  any of the other classes derived from this one.
   *
   *  This adaptor uses partial specialization.  The general case of
   *  _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
   *  standard-conforming allocator, possibly with non-equal instances and
   *  non-static members.  (It still behaves correctly even if _Alloc has
   *  static members and if all instances are equal.  Refinements affect
   *  performance, not correctness.)
   *
   *  There are always two members:  allocator_type, which is a standard-
   *  conforming allocator type for allocating objects of type _Tp, and
   *  _S_instanceless, a static const member of type bool.  If
   *  _S_instanceless is true, this means that there is no difference
   *  between any two instances of type allocator_type.  Furthermore, if
   *  _S_instanceless is true, then _Alloc_traits has one additional
   *  member:  _Alloc_type.  This type encapsulates allocation and
   *  deallocation of objects of type _Tp through a static interface; it
   *  has two member functions, whose signatures are
   *
   *  - static _Tp* allocate(size_t)
   *  - static void deallocate(_Tp*, size_t)
   *
   *  The size_t parameters are "standard" style (see top of stl_alloc.h) in
   *  that they take counts, not sizes.
   *
   *  @endif
   *  (See @link Allocators allocators info @endlink for more.)
   */
  //@{
  // The fully general version.
  template<typename _Tp, typename _Allocator>
    struct _Alloc_traits
    {
      static const bool _S_instanceless = false;
      typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
    };

  template<typename _Tp, typename _Allocator>
    const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;

  /// The version for the default allocator.
  template<typename _Tp, typename _Tp1>
    struct _Alloc_traits<_Tp, allocator<_Tp1> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __alloc> _Alloc_type;
      typedef allocator<_Tp> allocator_type;
    };
  //@}
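
  // A hedged sketch (illustration only) of the pattern containers use: when
  // _S_instanceless is true, allocation can go through the static
  // _Alloc_type interface and no allocator object needs to be stored.
  //
  //   typedef _Alloc_traits<int, allocator<int> > _Traits;
  //   // _Traits::_S_instanceless is true here.
  //   int* __p = _Traits::_Alloc_type::allocate(16);   // a count of ints
  //   _Traits::_Alloc_type::deallocate(__p, 16);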

  //@{
  /// Versions for the predefined "SGI" style allocators.
  template<typename _Tp, int __inst>
    struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
      typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
    };

  template<typename _Tp, bool __threads, int __inst>
    struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
      _Alloc_type;
      typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
      allocator_type;
    };

  template<typename _Tp, typename _Alloc>
    struct _Alloc_traits<_Tp, __debug_alloc<_Alloc> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
      typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
    };
  //@}

  //@{
  /// Versions for the __allocator adaptor used with the predefined
  /// "SGI" style allocators.
  template<typename _Tp, typename _Tp1, int __inst>
    struct _Alloc_traits<_Tp,
                         __allocator<_Tp1, __malloc_alloc_template<__inst> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
      typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
    };

  template<typename _Tp, typename _Tp1, bool __thr, int __inst>
    struct _Alloc_traits<_Tp, __allocator<_Tp1, __default_alloc_template<__thr, __inst> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
      _Alloc_type;
      typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
      allocator_type;
    };

  template<typename _Tp, typename _Tp1, typename _Alloc>
    struct _Alloc_traits<_Tp, __allocator<_Tp1, __debug_alloc<_Alloc> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
      typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
    };
  //@}

  // Inhibit implicit instantiations for required instantiations,
  // which are defined via explicit instantiations elsewhere.
  // NB: This syntax is a GNU extension.
  extern template class allocator<char>;
  extern template class allocator<wchar_t>;
  extern template class __default_alloc_template<true,0>;
} // namespace std

#endif