1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
3 // Copyright (C) 2010 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
// Helper function: return distance(first, last) for forward
// iterators, or 0 for input iterators.  Used by _Hashtable's range
// constructors/inserters to pre-size the table when the iterator
// category makes counting the range cheap.
template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last,
                std::input_iterator_tag)
  { return 0; }  // single-pass iterators: counting would consume the range

template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last,
                std::forward_iterator_tag)
  { return std::distance(__first, __last); }

// Dispatcher: picks one of the two overloads above by iterator category.
template<class _Iterator>
  inline typename std::iterator_traits<_Iterator>::difference_type
  __distance_fw(_Iterator __first, _Iterator __last)
  {
    typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
    return __distance_fw(__first, __last, _Tag());
  }
// Auxiliary types used for all instantiations of _Hashtable: nodes
// and iterators.

// Nodes, used to wrap elements stored in the hash table.  A policy
// template parameter of class template _Hashtable controls whether
// nodes also store a hash code.  In some cases (e.g. strings) this
// may be a performance win.
template<typename _Value, bool __cache_hash_code>
  struct _Hash_node;

// Specialization: the node caches the element's hash code.
template<typename _Value>
  struct _Hash_node<_Value, true>
  {
    _Value       _M_v;          // the stored element
    std::size_t  _M_hash_code;  // cached hash of _M_v's key
    _Hash_node*  _M_next;       // next node in the same bucket chain

    // Construct _M_v in place from __args; hash code and link are
    // value-initialized (filled in later by the hashtable).
    template<typename... _Args>
      _Hash_node(_Args&&... __args)
      : _M_v(std::forward<_Args>(__args)...),
        _M_hash_code(), _M_next() { }
  };

// Specialization: no cached hash code.
template<typename _Value>
  struct _Hash_node<_Value, false>
  {
    _Value       _M_v;     // the stored element
    _Hash_node*  _M_next;  // next node in the same bucket chain

    template<typename... _Args>
      _Hash_node(_Args&&... __args)
      : _M_v(std::forward<_Args>(__args)...),
        _M_next() { }
  };
95 // Local iterators, used to iterate within a bucket but not between
97 template<typename _Value
, bool __cache
>
98 struct _Node_iterator_base
100 _Node_iterator_base(_Hash_node
<_Value
, __cache
>* __p
)
105 { _M_cur
= _M_cur
->_M_next
; }
107 _Hash_node
<_Value
, __cache
>* _M_cur
;
110 template<typename _Value
, bool __cache
>
112 operator==(const _Node_iterator_base
<_Value
, __cache
>& __x
,
113 const _Node_iterator_base
<_Value
, __cache
>& __y
)
114 { return __x
._M_cur
== __y
._M_cur
; }
116 template<typename _Value
, bool __cache
>
118 operator!=(const _Node_iterator_base
<_Value
, __cache
>& __x
,
119 const _Node_iterator_base
<_Value
, __cache
>& __y
)
120 { return __x
._M_cur
!= __y
._M_cur
; }
122 template<typename _Value
, bool __constant_iterators
, bool __cache
>
123 struct _Node_iterator
124 : public _Node_iterator_base
<_Value
, __cache
>
126 typedef _Value value_type
;
127 typedef typename
std::conditional
<__constant_iterators
,
128 const _Value
*, _Value
*>::type
130 typedef typename
std::conditional
<__constant_iterators
,
131 const _Value
&, _Value
&>::type
133 typedef std::ptrdiff_t difference_type
;
134 typedef std::forward_iterator_tag iterator_category
;
137 : _Node_iterator_base
<_Value
, __cache
>(0) { }
140 _Node_iterator(_Hash_node
<_Value
, __cache
>* __p
)
141 : _Node_iterator_base
<_Value
, __cache
>(__p
) { }
145 { return this->_M_cur
->_M_v
; }
149 { return std::__addressof(this->_M_cur
->_M_v
); }
161 _Node_iterator
__tmp(*this);
167 template<typename _Value
, bool __constant_iterators
, bool __cache
>
168 struct _Node_const_iterator
169 : public _Node_iterator_base
<_Value
, __cache
>
171 typedef _Value value_type
;
172 typedef const _Value
* pointer
;
173 typedef const _Value
& reference
;
174 typedef std::ptrdiff_t difference_type
;
175 typedef std::forward_iterator_tag iterator_category
;
177 _Node_const_iterator()
178 : _Node_iterator_base
<_Value
, __cache
>(0) { }
181 _Node_const_iterator(_Hash_node
<_Value
, __cache
>* __p
)
182 : _Node_iterator_base
<_Value
, __cache
>(__p
) { }
184 _Node_const_iterator(const _Node_iterator
<_Value
, __constant_iterators
,
186 : _Node_iterator_base
<_Value
, __cache
>(__x
._M_cur
) { }
190 { return this->_M_cur
->_M_v
; }
194 { return std::__addressof(this->_M_cur
->_M_v
); }
196 _Node_const_iterator
&
206 _Node_const_iterator
__tmp(*this);
212 template<typename _Value
, bool __cache
>
213 struct _Hashtable_iterator_base
215 _Hashtable_iterator_base(_Hash_node
<_Value
, __cache
>* __node
,
216 _Hash_node
<_Value
, __cache
>** __bucket
)
217 : _M_cur_node(__node
), _M_cur_bucket(__bucket
) { }
222 _M_cur_node
= _M_cur_node
->_M_next
;
230 _Hash_node
<_Value
, __cache
>* _M_cur_node
;
231 _Hash_node
<_Value
, __cache
>** _M_cur_bucket
;
234 // Global iterators, used for arbitrary iteration within a hash
235 // table. Larger and more expensive than local iterators.
236 template<typename _Value
, bool __cache
>
238 _Hashtable_iterator_base
<_Value
, __cache
>::
243 // This loop requires the bucket array to have a non-null sentinel.
244 while (!*_M_cur_bucket
)
246 _M_cur_node
= *_M_cur_bucket
;
249 template<typename _Value
, bool __cache
>
251 operator==(const _Hashtable_iterator_base
<_Value
, __cache
>& __x
,
252 const _Hashtable_iterator_base
<_Value
, __cache
>& __y
)
253 { return __x
._M_cur_node
== __y
._M_cur_node
; }
255 template<typename _Value
, bool __cache
>
257 operator!=(const _Hashtable_iterator_base
<_Value
, __cache
>& __x
,
258 const _Hashtable_iterator_base
<_Value
, __cache
>& __y
)
259 { return __x
._M_cur_node
!= __y
._M_cur_node
; }
261 template<typename _Value
, bool __constant_iterators
, bool __cache
>
262 struct _Hashtable_iterator
263 : public _Hashtable_iterator_base
<_Value
, __cache
>
265 typedef _Value value_type
;
266 typedef typename
std::conditional
<__constant_iterators
,
267 const _Value
*, _Value
*>::type
269 typedef typename
std::conditional
<__constant_iterators
,
270 const _Value
&, _Value
&>::type
272 typedef std::ptrdiff_t difference_type
;
273 typedef std::forward_iterator_tag iterator_category
;
275 _Hashtable_iterator()
276 : _Hashtable_iterator_base
<_Value
, __cache
>(0, 0) { }
278 _Hashtable_iterator(_Hash_node
<_Value
, __cache
>* __p
,
279 _Hash_node
<_Value
, __cache
>** __b
)
280 : _Hashtable_iterator_base
<_Value
, __cache
>(__p
, __b
) { }
283 _Hashtable_iterator(_Hash_node
<_Value
, __cache
>** __b
)
284 : _Hashtable_iterator_base
<_Value
, __cache
>(*__b
, __b
) { }
288 { return this->_M_cur_node
->_M_v
; }
292 { return std::__addressof(this->_M_cur_node
->_M_v
); }
304 _Hashtable_iterator
__tmp(*this);
310 template<typename _Value
, bool __constant_iterators
, bool __cache
>
311 struct _Hashtable_const_iterator
312 : public _Hashtable_iterator_base
<_Value
, __cache
>
314 typedef _Value value_type
;
315 typedef const _Value
* pointer
;
316 typedef const _Value
& reference
;
317 typedef std::ptrdiff_t difference_type
;
318 typedef std::forward_iterator_tag iterator_category
;
320 _Hashtable_const_iterator()
321 : _Hashtable_iterator_base
<_Value
, __cache
>(0, 0) { }
323 _Hashtable_const_iterator(_Hash_node
<_Value
, __cache
>* __p
,
324 _Hash_node
<_Value
, __cache
>** __b
)
325 : _Hashtable_iterator_base
<_Value
, __cache
>(__p
, __b
) { }
328 _Hashtable_const_iterator(_Hash_node
<_Value
, __cache
>** __b
)
329 : _Hashtable_iterator_base
<_Value
, __cache
>(*__b
, __b
) { }
331 _Hashtable_const_iterator(const _Hashtable_iterator
<_Value
,
332 __constant_iterators
, __cache
>& __x
)
333 : _Hashtable_iterator_base
<_Value
, __cache
>(__x
._M_cur_node
,
334 __x
._M_cur_bucket
) { }
338 { return this->_M_cur_node
->_M_v
; }
342 { return std::__addressof(this->_M_cur_node
->_M_v
); }
344 _Hashtable_const_iterator
&
351 _Hashtable_const_iterator
354 _Hashtable_const_iterator
__tmp(*this);
// Many of class template _Hashtable's template parameters are policy
// classes.  These are defaults for the policies.

// Default range hashing function: use division to fold a large number
// into the range [0, N).
struct _Mod_range_hashing
{
  typedef std::size_t first_argument_type;
  typedef std::size_t second_argument_type;
  typedef std::size_t result_type;

  // Map hash code __num into a bucket index in [0, __den).
  result_type
  operator()(first_argument_type __num, second_argument_type __den) const
  { return __num % __den; }
};
// Default ranged hash function H.  In principle it should be a
// function object composed from objects of type H1 and H2 such that
// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
// h1 and h2.  So instead we'll just use a tag to tell class template
// hashtable to do that composition.
struct _Default_ranged_hash { };
// Default value for rehash policy.  Bucket size is (usually) the
// smallest prime that keeps the load factor small enough.
struct _Prime_rehash_policy
{
  _Prime_rehash_policy(float __z = 1.0)
  : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

  float
  max_load_factor() const
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n.
  std::size_t
  _M_next_bkt(std::size_t __n) const;

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const;

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted.  Do we need to
  // increase bucket count?  If so, return make_pair(true, n), where n
  // is the new bucket count.  If not, return make_pair(false, 0).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const;

  // Number of entries in __prime_list; larger on LP64 targets.
  enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

  float                _M_max_load_factor;
  float                _M_growth_factor;
  // Element count at which the next rehash is due; cached so that
  // insertions avoid a floating-point multiply (mutable: updated
  // from const member functions).
  mutable std::size_t  _M_next_resize;
};
// Table of primes used to size the bucket array; defined in the
// library (src/hashtable-aux.cc), _S_n_primes entries plus sentinel.
extern const unsigned long __prime_list[];

// XXX This is a hack.  There's no good reason for any of
// _Prime_rehash_policy's member functions to be inline.
423 // Return a prime no smaller than n.
425 _Prime_rehash_policy::
426 _M_next_bkt(std::size_t __n
) const
428 const unsigned long* __p
= std::lower_bound(__prime_list
, __prime_list
431 static_cast<std::size_t>(__builtin_ceil(*__p
* _M_max_load_factor
));
435 // Return the smallest prime p such that alpha p >= n, where alpha
436 // is the load factor.
438 _Prime_rehash_policy::
439 _M_bkt_for_elements(std::size_t __n
) const
441 const float __min_bkts
= __n
/ _M_max_load_factor
;
442 const unsigned long* __p
= std::lower_bound(__prime_list
, __prime_list
443 + _S_n_primes
, __min_bkts
);
445 static_cast<std::size_t>(__builtin_ceil(*__p
* _M_max_load_factor
));
449 // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
450 // If p > __n_bkt, return make_pair(true, p); otherwise return
451 // make_pair(false, 0). In principle this isn't very different from
452 // _M_bkt_for_elements.
454 // The only tricky part is that we're caching the element count at
455 // which we need to rehash, so we don't have to do a floating-point
456 // multiply for every insertion.
458 inline std::pair
<bool, std::size_t>
459 _Prime_rehash_policy::
460 _M_need_rehash(std::size_t __n_bkt
, std::size_t __n_elt
,
461 std::size_t __n_ins
) const
463 if (__n_elt
+ __n_ins
> _M_next_resize
)
465 float __min_bkts
= ((float(__n_ins
) + float(__n_elt
))
466 / _M_max_load_factor
);
467 if (__min_bkts
> __n_bkt
)
469 __min_bkts
= std::max(__min_bkts
, _M_growth_factor
* __n_bkt
);
470 const unsigned long* __p
=
471 std::lower_bound(__prime_list
, __prime_list
+ _S_n_primes
,
473 _M_next_resize
= static_cast<std::size_t>
474 (__builtin_ceil(*__p
* _M_max_load_factor
));
475 return std::make_pair(true, *__p
);
479 _M_next_resize
= static_cast<std::size_t>
480 (__builtin_ceil(__n_bkt
* _M_max_load_factor
));
481 return std::make_pair(false, 0);
485 return std::make_pair(false, 0);
// Base classes for std::_Hashtable.  We define these base classes
// because in some cases we want to do different things depending
// on the value of a policy class.  In some cases the policy class
// affects which member functions and nested typedefs are defined;
// we handle that by specializing base class templates.  Several of
// the base class templates need to access other members of class
// template _Hashtable, so we use the "curiously recurring template
// pattern" for them.

// class template _Map_base.  If the hashtable has a value type of
// the form pair<T1, T2> and a key extraction policy that returns the
// first part of the pair, the hashtable gets a mapped_type typedef.
// If it satisfies those criteria and also has unique keys, then it
// also gets an operator[].
template<typename _Key, typename _Value, typename _Ex, bool __unique,
         typename _Hashtable>
  struct _Map_base { };
506 template<typename _Key
, typename _Pair
, typename _Hashtable
>
507 struct _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, false, _Hashtable
>
509 typedef typename
_Pair::second_type mapped_type
;
512 template<typename _Key
, typename _Pair
, typename _Hashtable
>
513 struct _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, true, _Hashtable
>
515 typedef typename
_Pair::second_type mapped_type
;
518 operator[](const _Key
& __k
);
521 operator[](_Key
&& __k
);
523 // _GLIBCXX_RESOLVE_LIB_DEFECTS
524 // DR 761. unordered_map needs an at() member function.
529 at(const _Key
& __k
) const;
532 template<typename _Key
, typename _Pair
, typename _Hashtable
>
533 typename _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>,
534 true, _Hashtable
>::mapped_type
&
535 _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, true, _Hashtable
>::
536 operator[](const _Key
& __k
)
538 _Hashtable
* __h
= static_cast<_Hashtable
*>(this);
539 typename
_Hashtable::_Hash_code_type __code
= __h
->_M_hash_code(__k
);
540 std::size_t __n
= __h
->_M_bucket_index(__k
, __code
,
541 __h
->_M_bucket_count
);
543 typename
_Hashtable::_Node
* __p
=
544 __h
->_M_find_node(__h
->_M_buckets
[__n
], __k
, __code
);
546 return __h
->_M_insert_bucket(std::make_pair(__k
, mapped_type()),
547 __n
, __code
)->second
;
548 return (__p
->_M_v
).second
;
551 template<typename _Key
, typename _Pair
, typename _Hashtable
>
552 typename _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>,
553 true, _Hashtable
>::mapped_type
&
554 _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, true, _Hashtable
>::
555 operator[](_Key
&& __k
)
557 _Hashtable
* __h
= static_cast<_Hashtable
*>(this);
558 typename
_Hashtable::_Hash_code_type __code
= __h
->_M_hash_code(__k
);
559 std::size_t __n
= __h
->_M_bucket_index(__k
, __code
,
560 __h
->_M_bucket_count
);
562 typename
_Hashtable::_Node
* __p
=
563 __h
->_M_find_node(__h
->_M_buckets
[__n
], __k
, __code
);
565 return __h
->_M_insert_bucket(std::make_pair(std::move(__k
),
567 __n
, __code
)->second
;
568 return (__p
->_M_v
).second
;
571 template<typename _Key
, typename _Pair
, typename _Hashtable
>
572 typename _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>,
573 true, _Hashtable
>::mapped_type
&
574 _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, true, _Hashtable
>::
577 _Hashtable
* __h
= static_cast<_Hashtable
*>(this);
578 typename
_Hashtable::_Hash_code_type __code
= __h
->_M_hash_code(__k
);
579 std::size_t __n
= __h
->_M_bucket_index(__k
, __code
,
580 __h
->_M_bucket_count
);
582 typename
_Hashtable::_Node
* __p
=
583 __h
->_M_find_node(__h
->_M_buckets
[__n
], __k
, __code
);
585 __throw_out_of_range(__N("_Map_base::at"));
586 return (__p
->_M_v
).second
;
589 template<typename _Key
, typename _Pair
, typename _Hashtable
>
590 const typename _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>,
591 true, _Hashtable
>::mapped_type
&
592 _Map_base
<_Key
, _Pair
, std::_Select1st
<_Pair
>, true, _Hashtable
>::
593 at(const _Key
& __k
) const
595 const _Hashtable
* __h
= static_cast<const _Hashtable
*>(this);
596 typename
_Hashtable::_Hash_code_type __code
= __h
->_M_hash_code(__k
);
597 std::size_t __n
= __h
->_M_bucket_index(__k
, __code
,
598 __h
->_M_bucket_count
);
600 typename
_Hashtable::_Node
* __p
=
601 __h
->_M_find_node(__h
->_M_buckets
[__n
], __k
, __code
);
603 __throw_out_of_range(__N("_Map_base::at"));
604 return (__p
->_M_v
).second
;
607 // class template _Rehash_base. Give hashtable the max_load_factor
608 // functions and reserve iff the rehash policy is _Prime_rehash_policy.
609 template<typename _RehashPolicy
, typename _Hashtable
>
610 struct _Rehash_base
{ };
612 template<typename _Hashtable
>
613 struct _Rehash_base
<_Prime_rehash_policy
, _Hashtable
>
616 max_load_factor() const
618 const _Hashtable
* __this
= static_cast<const _Hashtable
*>(this);
619 return __this
->__rehash_policy().max_load_factor();
623 max_load_factor(float __z
)
625 _Hashtable
* __this
= static_cast<_Hashtable
*>(this);
626 __this
->__rehash_policy(_Prime_rehash_policy(__z
));
630 reserve(std::size_t __n
)
632 _Hashtable
* __this
= static_cast<_Hashtable
*>(this);
633 __this
->rehash(__builtin_ceil(__n
/ max_load_factor()));
// Class template _Hash_code_base.  Encapsulates two policy issues that
// aren't quite orthogonal.
//   (1) the difference between using a ranged hash function and using
//       the combination of a hash function and a range-hashing function.
//       In the former case we don't have such things as hash codes, so
//       we have a dummy type as placeholder.
//   (2) Whether or not we cache hash codes.  Caching hash codes is
//       meaningless if we have a ranged hash function.
// We also put the key extraction and equality comparison function
// objects here, for convenience.

// Primary template: unused except as a hook for specializations.
template<typename _Key, typename _Value,
         typename _ExtractKey, typename _Equal,
         typename _H1, typename _H2, typename _Hash,
         bool __cache_hash_code>
  struct _Hash_code_base;
655 // Specialization: ranged hash function, no caching hash codes. H1
656 // and H2 are provided but ignored. We define a dummy hash code type.
657 template<typename _Key
, typename _Value
,
658 typename _ExtractKey
, typename _Equal
,
659 typename _H1
, typename _H2
, typename _Hash
>
660 struct _Hash_code_base
<_Key
, _Value
, _ExtractKey
, _Equal
, _H1
, _H2
,
664 _Hash_code_base(const _ExtractKey
& __ex
, const _Equal
& __eq
,
665 const _H1
&, const _H2
&, const _Hash
& __h
)
666 : _M_extract(__ex
), _M_eq(__eq
), _M_ranged_hash(__h
) { }
668 typedef void* _Hash_code_type
;
671 _M_hash_code(const _Key
& __key
) const
675 _M_bucket_index(const _Key
& __k
, _Hash_code_type
,
676 std::size_t __n
) const
677 { return _M_ranged_hash(__k
, __n
); }
680 _M_bucket_index(const _Hash_node
<_Value
, false>* __p
,
681 std::size_t __n
) const
682 { return _M_ranged_hash(_M_extract(__p
->_M_v
), __n
); }
685 _M_compare(const _Key
& __k
, _Hash_code_type
,
686 _Hash_node
<_Value
, false>* __n
) const
687 { return _M_eq(__k
, _M_extract(__n
->_M_v
)); }
690 _M_store_code(_Hash_node
<_Value
, false>*, _Hash_code_type
) const
694 _M_copy_code(_Hash_node
<_Value
, false>*,
695 const _Hash_node
<_Value
, false>*) const
699 _M_swap(_Hash_code_base
& __x
)
701 std::swap(_M_extract
, __x
._M_extract
);
702 std::swap(_M_eq
, __x
._M_eq
);
703 std::swap(_M_ranged_hash
, __x
._M_ranged_hash
);
707 _ExtractKey _M_extract
;
709 _Hash _M_ranged_hash
;
713 // No specialization for ranged hash function while caching hash codes.
714 // That combination is meaningless, and trying to do it is an error.
717 // Specialization: ranged hash function, cache hash codes. This
718 // combination is meaningless, so we provide only a declaration
719 // and no definition.
720 template<typename _Key
, typename _Value
,
721 typename _ExtractKey
, typename _Equal
,
722 typename _H1
, typename _H2
, typename _Hash
>
723 struct _Hash_code_base
<_Key
, _Value
, _ExtractKey
, _Equal
, _H1
, _H2
,
726 // Specialization: hash function and range-hashing function, no
727 // caching of hash codes. H is provided but ignored. Provides
728 // typedef and accessor required by TR1.
729 template<typename _Key
, typename _Value
,
730 typename _ExtractKey
, typename _Equal
,
731 typename _H1
, typename _H2
>
732 struct _Hash_code_base
<_Key
, _Value
, _ExtractKey
, _Equal
, _H1
, _H2
,
733 _Default_ranged_hash
, false>
738 hash_function() const
742 _Hash_code_base(const _ExtractKey
& __ex
, const _Equal
& __eq
,
743 const _H1
& __h1
, const _H2
& __h2
,
744 const _Default_ranged_hash
&)
745 : _M_extract(__ex
), _M_eq(__eq
), _M_h1(__h1
), _M_h2(__h2
) { }
747 typedef std::size_t _Hash_code_type
;
750 _M_hash_code(const _Key
& __k
) const
751 { return _M_h1(__k
); }
754 _M_bucket_index(const _Key
&, _Hash_code_type __c
,
755 std::size_t __n
) const
756 { return _M_h2(__c
, __n
); }
759 _M_bucket_index(const _Hash_node
<_Value
, false>* __p
,
760 std::size_t __n
) const
761 { return _M_h2(_M_h1(_M_extract(__p
->_M_v
)), __n
); }
764 _M_compare(const _Key
& __k
, _Hash_code_type
,
765 _Hash_node
<_Value
, false>* __n
) const
766 { return _M_eq(__k
, _M_extract(__n
->_M_v
)); }
769 _M_store_code(_Hash_node
<_Value
, false>*, _Hash_code_type
) const
773 _M_copy_code(_Hash_node
<_Value
, false>*,
774 const _Hash_node
<_Value
, false>*) const
778 _M_swap(_Hash_code_base
& __x
)
780 std::swap(_M_extract
, __x
._M_extract
);
781 std::swap(_M_eq
, __x
._M_eq
);
782 std::swap(_M_h1
, __x
._M_h1
);
783 std::swap(_M_h2
, __x
._M_h2
);
787 _ExtractKey _M_extract
;
793 // Specialization: hash function and range-hashing function,
794 // caching hash codes. H is provided but ignored. Provides
795 // typedef and accessor required by TR1.
796 template<typename _Key
, typename _Value
,
797 typename _ExtractKey
, typename _Equal
,
798 typename _H1
, typename _H2
>
799 struct _Hash_code_base
<_Key
, _Value
, _ExtractKey
, _Equal
, _H1
, _H2
,
800 _Default_ranged_hash
, true>
805 hash_function() const
809 _Hash_code_base(const _ExtractKey
& __ex
, const _Equal
& __eq
,
810 const _H1
& __h1
, const _H2
& __h2
,
811 const _Default_ranged_hash
&)
812 : _M_extract(__ex
), _M_eq(__eq
), _M_h1(__h1
), _M_h2(__h2
) { }
814 typedef std::size_t _Hash_code_type
;
817 _M_hash_code(const _Key
& __k
) const
818 { return _M_h1(__k
); }
821 _M_bucket_index(const _Key
&, _Hash_code_type __c
,
822 std::size_t __n
) const
823 { return _M_h2(__c
, __n
); }
826 _M_bucket_index(const _Hash_node
<_Value
, true>* __p
,
827 std::size_t __n
) const
828 { return _M_h2(__p
->_M_hash_code
, __n
); }
831 _M_compare(const _Key
& __k
, _Hash_code_type __c
,
832 _Hash_node
<_Value
, true>* __n
) const
833 { return __c
== __n
->_M_hash_code
&& _M_eq(__k
, _M_extract(__n
->_M_v
)); }
836 _M_store_code(_Hash_node
<_Value
, true>* __n
, _Hash_code_type __c
) const
837 { __n
->_M_hash_code
= __c
; }
840 _M_copy_code(_Hash_node
<_Value
, true>* __to
,
841 const _Hash_node
<_Value
, true>* __from
) const
842 { __to
->_M_hash_code
= __from
->_M_hash_code
; }
845 _M_swap(_Hash_code_base
& __x
)
847 std::swap(_M_extract
, __x
._M_extract
);
848 std::swap(_M_eq
, __x
._M_eq
);
849 std::swap(_M_h1
, __x
._M_h1
);
850 std::swap(_M_h2
, __x
._M_h2
);
854 _ExtractKey _M_extract
;
// Class template _Equality_base.  This is for implementing equality
// comparison for unordered containers, per N3068, by John Lakos and
// Pablo Halpern.  Algorithmically, we follow closely the reference
// implementations therein.
template<typename _ExtractKey, bool __unique_keys,
         typename _Hashtable>
  struct _Equality_base;
869 template<typename _ExtractKey
, typename _Hashtable
>
870 struct _Equality_base
<_ExtractKey
, true, _Hashtable
>
872 bool _M_equal(const _Hashtable
&) const;
875 template<typename _ExtractKey
, typename _Hashtable
>
877 _Equality_base
<_ExtractKey
, true, _Hashtable
>::
878 _M_equal(const _Hashtable
& __other
) const
880 const _Hashtable
* __this
= static_cast<const _Hashtable
*>(this);
882 if (__this
->size() != __other
.size())
885 for (auto __itx
= __this
->begin(); __itx
!= __this
->end(); ++__itx
)
887 const auto __ity
= __other
.find(_ExtractKey()(*__itx
));
888 if (__ity
== __other
.end() || *__ity
!= *__itx
)
894 template<typename _ExtractKey
, typename _Hashtable
>
895 struct _Equality_base
<_ExtractKey
, false, _Hashtable
>
897 bool _M_equal(const _Hashtable
&) const;
900 template<typename _Uiterator
>
902 _S_is_permutation(_Uiterator
, _Uiterator
, _Uiterator
);
905 // See std::is_permutation in N3068.
906 template<typename _ExtractKey
, typename _Hashtable
>
907 template<typename _Uiterator
>
909 _Equality_base
<_ExtractKey
, false, _Hashtable
>::
910 _S_is_permutation(_Uiterator __first1
, _Uiterator __last1
,
913 for (; __first1
!= __last1
; ++__first1
, ++__first2
)
914 if (!(*__first1
== *__first2
))
917 if (__first1
== __last1
)
920 _Uiterator __last2
= __first2
;
921 std::advance(__last2
, std::distance(__first1
, __last1
));
923 for (_Uiterator __it1
= __first1
; __it1
!= __last1
; ++__it1
)
925 _Uiterator __tmp
= __first1
;
926 while (__tmp
!= __it1
&& !(*__tmp
== *__it1
))
929 // We've seen this one before.
933 std::ptrdiff_t __n2
= 0;
934 for (__tmp
= __first2
; __tmp
!= __last2
; ++__tmp
)
935 if (*__tmp
== *__it1
)
941 std::ptrdiff_t __n1
= 0;
942 for (__tmp
= __it1
; __tmp
!= __last1
; ++__tmp
)
943 if (*__tmp
== *__it1
)
952 template<typename _ExtractKey
, typename _Hashtable
>
954 _Equality_base
<_ExtractKey
, false, _Hashtable
>::
955 _M_equal(const _Hashtable
& __other
) const
957 const _Hashtable
* __this
= static_cast<const _Hashtable
*>(this);
959 if (__this
->size() != __other
.size())
962 for (auto __itx
= __this
->begin(); __itx
!= __this
->end();)
964 const auto __xrange
= __this
->equal_range(_ExtractKey()(*__itx
));
965 const auto __yrange
= __other
.equal_range(_ExtractKey()(*__itx
));
967 if (std::distance(__xrange
.first
, __xrange
.second
)
968 != std::distance(__yrange
.first
, __yrange
.second
))
971 if (!_S_is_permutation(__xrange
.first
,
976 __itx
= __xrange
.second
;
980 } // namespace __detail
983 #endif // _HASHTABLE_POLICY_H