re PR libstdc++/50257 ([C++0x] unordered_map slow initialization due to huge __prime_...
libstdc++-v3/include/bits/hashtable_policy.h
// Internal policy header for unordered_set and unordered_map -*- C++ -*-

// Copyright (C) 2010, 2011 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/hashtable_policy.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly.
 *  @headername{unordered_map,unordered_set}
 */

#ifndef _HASHTABLE_POLICY_H
#define _HASHTABLE_POLICY_H 1

namespace std _GLIBCXX_VISIBILITY(default)
{
namespace __detail
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  // Helper function: return distance(first, last) for forward
  // iterators, or 0 for input iterators.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::input_iterator_tag)
    { return 0; }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::forward_iterator_tag)
    { return std::distance(__first, __last); }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
      return __distance_fw(__first, __last, _Tag());
    }
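
  // Illustrative note: class template _Hashtable uses this helper when
  // constructing from an iterator range, so that a forward range can be
  // measured once to pick a sensible initial bucket count, while a pure
  // input range (which cannot be traversed twice) is reported as 0 and
  // the table simply grows as elements arrive.  A sketch, with
  // hypothetical helper names:
  //
  //   template<typename _InputIterator>
  //     void
  //     __size_hint_for_range(_InputIterator __f, _InputIterator __l)
  //     {
  //       auto __n = __detail::__distance_fw(__f, __l);
  //       if (__n > 0)
  //         __prepare_buckets_for(__n);   // hypothetical
  //     }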

  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code.  In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value _M_v;
      std::size_t _M_hash_code;
      _Hash_node* _M_next;

      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_hash_code(), _M_next() { }
    };

  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value _M_v;
      _Hash_node* _M_next;

      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_next() { }
    };

  // Local iterators, used to iterate within a bucket but not between
  // buckets.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>* _M_cur;
    };

  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value*, _Value*>::type
                                                        pointer;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value&, _Value&>::type
                                                        reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int)
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef const _Value* pointer;
      typedef const _Value& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int)
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>* _M_cur_node;
      _Hash_node<_Value, __cache>** _M_cur_bucket;
    };

  // Global iterators, used for arbitrary iteration within a hash
  // table.  Larger and more expensive than local iterators.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }

  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value*, _Value*>::type
                                                        pointer;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value&, _Value&>::type
                                                        reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
                          _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_iterator
      operator++(int)
      {
        _Hashtable_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef const _Value* pointer;
      typedef const _Value& reference;
      typedef std::ptrdiff_t difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
                                _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
                                __constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
                                                  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_const_iterator
      operator++(int)
      {
        _Hashtable_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  // Many of class template _Hashtable's template parameters are policy
  // classes.  These are defaults for the policies.

  // Default range hashing function: use division to fold a large number
  // into the range [0, N).
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    result_type
    operator()(first_argument_type __num, second_argument_type __den) const
    { return __num % __den; }
  };

  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition.
  struct _Default_ranged_hash { };
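
  // Illustrative note: with the default policies H1 is the user-visible
  // hasher (e.g. std::hash<_Key>) and H2 is _Mod_range_hashing, so the
  // composed ranged hash the table applies amounts to
  //
  //   std::size_t __bkt = _Mod_range_hashing()(std::hash<_Key>()(__k),
  //                                            __bucket_count);
  //   // i.e. __bkt == std::hash<_Key>()(__k) % __bucket_count
  //
  // A user-supplied ranged hash replaces both steps and maps
  // (key, bucket count) to a bucket index directly.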

  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    _Prime_rehash_policy(float __z = 1.0)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float _M_max_load_factor;
    float _M_growth_factor;
    mutable std::size_t _M_next_resize;
  };

  extern const unsigned long __prime_list[];

  // XXX This is a hack.  There's no good reason for any of
  // _Prime_rehash_policy's member functions to be inline.

  // Return a prime no smaller than n.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    // Optimize lookups involving the first elements of __prime_list.
    // (useful to speed-up, eg, constructors)
    static const unsigned char __fastbkt[12]
      = { 2, 2, 2, 3, 5, 5, 7, 7, 11, 11, 11, 11 };

    const unsigned long __p
      = __n <= 11 ? __fastbkt[__n]
                  : *std::lower_bound(__prime_list + 5,
                                      __prime_list + _S_n_primes, __n);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_floor(__p * _M_max_load_factor));
    return __p;
  }
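
  // Worked example (illustrative): for the small requests made by
  // default constructors the answer comes straight from __fastbkt and
  // __prime_list is never touched, which is what PR 50257 addresses:
  //
  //   _Prime_rehash_policy __pol;   // max load factor 1.0
  //   __pol._M_next_bkt(0);   // -> 2  (__fastbkt[0])
  //   __pol._M_next_bkt(10);  // -> 11 (__fastbkt[10])
  //   __pol._M_next_bkt(12);  // -> first prime >= 12 from __prime_list
  //
  // Each call also caches floor(__p * max load factor) in
  // _M_next_resize, the element count that triggers the next growth.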

  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    const float __min_bkts = __n / _M_max_load_factor;
    const unsigned long __p = *std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __min_bkts);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_floor(__p * _M_max_load_factor));
    return __p;
  }

  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            const unsigned long __p =
              *std::lower_bound(__prime_list, __prime_list + _S_n_primes,
                                __min_bkts);
            _M_next_resize = static_cast<std::size_t>
              (__builtin_floor(__p * _M_max_load_factor));
            return std::make_pair(true, __p);
          }
        else
          {
            _M_next_resize = static_cast<std::size_t>
              (__builtin_floor(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
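
  // Worked example (illustrative): with max load factor 1.0, 100
  // buckets, 97 elements stored and _M_next_resize cached as 100:
  //
  //   _M_need_rehash(100, 97, 1);   // 98 <= 100: one integer compare,
  //                                 // no float math, no rehash
  //   _M_need_rehash(100, 100, 1);  // 101 > 100: recompute; 101 buckets
  //                                 // are needed, the growth factor
  //                                 // raises the request to
  //                                 // max(101, 2 * 100) = 200, and the
  //                                 // first prime >= 200 in __prime_list
  //                                 // comes back as make_pair(true, __p)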

  // Base classes for std::_Hashtable.  We define these base classes
  // because in some cases we want to do different things depending
  // on the value of a policy class.  In some cases the policy class
  // affects which member functions and nested typedefs are defined;
  // we handle that by specializing base class templates.  Several of
  // the base class templates need to access other members of class
  // template _Hashtable, so we use the "curiously recurring template
  // pattern" for them.

  // class template _Map_base.  If the hashtable has a value type of
  // the form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);

      mapped_type&
      operator[](_Key&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      mapped_type&
      at(const _Key& __k);

      const mapped_type&
      at(const _Key& __k) const;
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
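
  // Usage sketch (illustrative): this is the code path behind
  // std::unordered_map's operator[].  The key is hashed once, its bucket
  // is searched, and only on a miss is a value-initialized mapped_type
  // inserted:
  //
  //   std::unordered_map<std::string, int> __word_count;
  //   ++__word_count["apple"];  // miss: inserts {"apple", 0}, returns it
  //   ++__word_count["apple"];  // hit: returns the existing element
  //   // __word_count["apple"] == 2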

  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](_Key&& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(std::move(__k),
                                                    mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }

  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }

  template<typename _Key, typename _Pair, typename _Hashtable>
    const typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                             true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k) const
    {
      const _Hashtable* __h = static_cast<const _Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }

  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions and reserve iff the rehash policy is _Prime_rehash_policy.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };

  template<typename _Hashtable>
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
    {
      float
      max_load_factor() const noexcept
      {
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
        return __this->__rehash_policy().max_load_factor();
      }

      void
      max_load_factor(float __z)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->__rehash_policy(_Prime_rehash_policy(__z));
      }

      void
      reserve(std::size_t __n)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->rehash(__builtin_ceil(__n / max_load_factor()));
      }
    };
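
  // Usage sketch (illustrative): reserve() sizes the table by element
  // count rather than bucket count, dividing by the load factor before
  // delegating to rehash(), which rounds up to a suitable bucket count:
  //
  //   std::unordered_set<int> __s;
  //   __s.max_load_factor(0.5f);
  //   __s.reserve(100);   // requests ceil(100 / 0.5) = 200 buckets, so
  //                       // 100 insertions can proceed without rehashing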

  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.

  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;

  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, false>
    {
    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
                      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey _M_extract;
      _Equal _M_eq;
      _Hash _M_ranged_hash;
    };

  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.

  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;

  // Specialization: hash function and range-hashing function, no
  // caching of hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, false>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey _M_extract;
      _Equal _M_eq;
      _H1 _M_h1;
      _H2 _M_h2;
    };

  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey _M_extract;
      _Equal _M_eq;
      _H1 _M_h1;
      _H2 _M_h2;
    };
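
  // Illustrative note: the payoff of caching shows up in _M_compare
  // above.  With a cached code, probing a bucket of std::string keys
  // rejects most non-matching nodes with a single std::size_t compare
  // (__c == __n->_M_hash_code) and only falls back to the user's _Equal
  // functor on a hash collision; rehashing can likewise reuse the stored
  // code via _M_bucket_index instead of hashing every key again:
  //
  //   // probing a bucket for key __k with cached code __c:
  //   //   node A: stored code != __c  -> skip, no string comparison
  //   //   node B: stored code == __c  -> compare the strings with _M_eq
  //
  // The non-caching specialization must call _M_eq for every node it
  // visits and recompute _M_h1 for every node it moves during a rehash.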

  // Class template _Equality_base.  This is for implementing equality
  // comparison for unordered containers, per N3068, by John Lakos and
  // Pablo Halpern.  Algorithmically, we follow closely the reference
  // implementations therein.
  template<typename _ExtractKey, bool __unique_keys,
           typename _Hashtable>
    struct _Equality_base;

  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, true, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;
    };

  template<typename _ExtractKey, typename _Hashtable>
    bool
    _Equality_base<_ExtractKey, true, _Hashtable>::
    _M_equal(const _Hashtable& __other) const
    {
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);

      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
        {
          const auto __ity = __other.find(_ExtractKey()(*__itx));
          if (__ity == __other.end() || *__ity != *__itx)
            return false;
        }
      return true;
    }

  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, false, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;

    private:
      template<typename _Uiterator>
        static bool
        _S_is_permutation(_Uiterator, _Uiterator, _Uiterator);
    };

  // See std::is_permutation in N3068.
  template<typename _ExtractKey, typename _Hashtable>
    template<typename _Uiterator>
      bool
      _Equality_base<_ExtractKey, false, _Hashtable>::
      _S_is_permutation(_Uiterator __first1, _Uiterator __last1,
                        _Uiterator __first2)
      {
        for (; __first1 != __last1; ++__first1, ++__first2)
          if (!(*__first1 == *__first2))
            break;

        if (__first1 == __last1)
          return true;

        _Uiterator __last2 = __first2;
        std::advance(__last2, std::distance(__first1, __last1));

        for (_Uiterator __it1 = __first1; __it1 != __last1; ++__it1)
          {
            _Uiterator __tmp = __first1;
            while (__tmp != __it1 && !(*__tmp == *__it1))
              ++__tmp;

            // We've seen this one before.
            if (__tmp != __it1)
              continue;

            std::ptrdiff_t __n2 = 0;
            for (__tmp = __first2; __tmp != __last2; ++__tmp)
              if (*__tmp == *__it1)
                ++__n2;

            if (!__n2)
              return false;

            std::ptrdiff_t __n1 = 0;
            for (__tmp = __it1; __tmp != __last1; ++__tmp)
              if (*__tmp == *__it1)
                ++__n1;

            if (__n1 != __n2)
              return false;
          }
        return true;
      }
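
  // Illustrative trace: comparing one key's equal_range from two
  // containers with non-unique keys.  For a left range {a, b, a} and a
  // right range {b, a, a} the common-prefix scan stops at the first
  // mismatch, after which each distinct value is counted in both ranges:
  //
  //   value a: 2 occurrences on the left, 2 on the right
  //   value b: 1 occurrence  on the left, 1 on the right  -> permutation
  //
  // The scan is quadratic in the range length, which is acceptable
  // because it only ever runs over a single equal_range, never the
  // whole table.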

  template<typename _ExtractKey, typename _Hashtable>
    bool
    _Equality_base<_ExtractKey, false, _Hashtable>::
    _M_equal(const _Hashtable& __other) const
    {
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);

      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
        {
          const auto __xrange = __this->equal_range(_ExtractKey()(*__itx));
          const auto __yrange = __other.equal_range(_ExtractKey()(*__itx));

          if (std::distance(__xrange.first, __xrange.second)
              != std::distance(__yrange.first, __yrange.second))
            return false;

          if (!_S_is_permutation(__xrange.first,
                                 __xrange.second,
                                 __yrange.first))
            return false;

          __itx = __xrange.second;
        }
      return true;
    }

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace __detail
} // namespace std

#endif // _HASHTABLE_POLICY_H