// Internal policy header for unordered_set and unordered_map -*- C++ -*-

// Copyright (C) 2010 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/hashtable_policy.h
 *  This is an internal header file, included by other library headers.
 *  You should not attempt to use it directly.
 */

#ifndef _HASHTABLE_POLICY_H
#define _HASHTABLE_POLICY_H 1
namespace std
{
namespace __detail
{
  // Helper function: return distance(first, last) for forward
  // iterators, or 0 for input iterators.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::input_iterator_tag)
    { return 0; }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::forward_iterator_tag)
    { return std::distance(__first, __last); }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
      return __distance_fw(__first, __last, _Tag());
    }
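  // Illustrative sketch (not part of the original header): _Hashtable can
  // use __distance_fw to pre-size its bucket array when constructing or
  // inserting from a range.  The tag dispatch above yields the real element
  // count for forward (or stronger) iterators and 0 for pure input
  // iterators, where a second pass over the range is impossible.  The
  // helper name below is hypothetical:
  //
  //   template<typename _InputIterator>
  //     std::size_t
  //     __example_size_hint(_InputIterator __first, _InputIterator __last)
  //     {
  //       // 0 means "no usable hint"; a positive value can be passed to
  //       // the rehash policy's _M_bkt_for_elements.
  //       return __distance_fw(__first, __last);
  //     }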
  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code.  In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;
      std::size_t  _M_hash_code;
      _Hash_node*  _M_next;

      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_hash_code(), _M_next() { }
    };

  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;
      _Hash_node*  _M_next;

      template<typename... _Args>
        _Hash_node(_Args&&... __args)
        : _M_v(std::forward<_Args>(__args)...),
          _M_next() { }
    };
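  // Illustrative note (not part of the original header): the cached
  // specialization above stores one extra std::size_t per node, so for an
  // expensive hash such as std::hash<std::string> it trades memory for
  // fewer hash invocations during lookup and rehashing.  A hypothetical
  // sanity check:
  //
  //   static_assert(sizeof(_Hash_node<int, true>)
  //                 >= sizeof(_Hash_node<int, false>),
  //                 "caching adds (at least) a hash-code field per node");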
  // Local iterators, used to iterate within a bucket but not between
  // buckets.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>*  _M_cur;
    };

  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                    value_type;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value*, _Value*>::type
                                                        pointer;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value&, _Value&>::type
                                                        reference;
      typedef std::ptrdiff_t                            difference_type;
      typedef std::forward_iterator_tag                 iterator_category;

      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int)
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                    value_type;
      typedef const _Value*                             pointer;
      typedef const _Value&                             reference;
      typedef std::ptrdiff_t                            difference_type;
      typedef std::forward_iterator_tag                 iterator_category;

      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int)
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;
      _Hash_node<_Value, __cache>**  _M_cur_bucket;
    };

  // Global iterators, used for arbitrary iteration within a hash
  // table.  Larger and more expensive than local iterators.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }
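  // Illustrative note (not part of the original header): _M_incr_bucket
  // performs no end-of-array test, so _Hashtable must keep one extra
  // bucket slot holding an arbitrary non-null sentinel pointer; the loop
  // above stops there.  A hypothetical setup sketch:
  //
  //   // __buckets has __n + 1 slots; the last one is the sentinel.
  //   __buckets[__n] = reinterpret_cast<_Hash_node<int, false>*>(__buckets);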
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                    value_type;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value*, _Value*>::type
                                                        pointer;
      typedef typename std::conditional<__constant_iterators,
                                        const _Value&, _Value&>::type
                                                        reference;
      typedef std::ptrdiff_t                            difference_type;
      typedef std::forward_iterator_tag                 iterator_category;

      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
                          _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_iterator
      operator++(int)
      {
        _Hashtable_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                    value_type;
      typedef const _Value*                             pointer;
      typedef const _Value&                             reference;
      typedef std::ptrdiff_t                            difference_type;
      typedef std::forward_iterator_tag                 iterator_category;

      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
                                _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
                                __constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
                                                  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_const_iterator
      operator++(int)
      {
        _Hashtable_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  // Many of class template _Hashtable's template parameters are policy
  // classes.  These are defaults for the policies.

  // Default range hashing function: use division to fold a large number
  // into the range [0, N).
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    result_type
    operator()(first_argument_type __num, second_argument_type __den) const
    { return __num % __den; }
  };

  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition.
  struct _Default_ranged_hash { };
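  // Illustrative sketch (not part of the original header): the composition
  // the tag stands for.  With H1 = std::hash<int> and H2 =
  // _Mod_range_hashing, the bucket of a key is h2(h1(k), N), i.e.
  // hash-then-fold.  The function name is hypothetical:
  //
  //   inline std::size_t
  //   __example_bucket(int __k, std::size_t __n)
  //   {
  //     std::hash<int> __h1;
  //     _Mod_range_hashing __h2;
  //     return __h2(__h1(__k), __n);   // == __h1(__k) % __n
  //   }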
  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    _Prime_rehash_policy(float __z = 1.0)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements.
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float                _M_max_load_factor;
    float                _M_growth_factor;
    mutable std::size_t  _M_next_resize;
  };
  extern const unsigned long __prime_list[];

  // XXX This is a hack.  There's no good reason for any of
  // _Prime_rehash_policy's member functions to be inline.

  // Return a prime no smaller than n.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __n);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    const float __min_bkts = __n / _M_max_load_factor;
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __min_bkts);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            const unsigned long* __p =
              std::lower_bound(__prime_list, __prime_list + _S_n_primes,
                               __min_bkts);
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(*__p * _M_max_load_factor));
            return std::make_pair(true, *__p);
          }
        else
          {
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
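  // Illustrative worked example (not part of the original header),
  // assuming max_load_factor 1.0 and growth_factor 2.0:
  //
  //   _Prime_rehash_policy __pol(1.0f);
  //   // 11 buckets, 10 elements, 5 more to insert: 15 exceeds the cached
  //   // threshold; __min_bkts = 15 / 1.0 = 15 > 11, grown to
  //   // max(15, 2 * 11) = 22, so the result is (true, p) with p the first
  //   // prime in __prime_list not below 22.
  //   std::pair<bool, std::size_t> __r = __pol._M_need_rehash(11, 10, 5);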
  // Base classes for std::_Hashtable.  We define these base classes
  // because in some cases we want to do different things depending
  // on the value of a policy class.  In some cases the policy class
  // affects which member functions and nested typedefs are defined;
  // we handle that by specializing base class templates.  Several of
  // the base class templates need to access other members of class
  // template _Hashtable, so we use the "curiously recurring template
  // pattern" for them.

  // class template _Map_base.  If the hashtable has a value type of
  // the form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };
  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);

      mapped_type&
      operator[](_Key&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      mapped_type&
      at(const _Key& __k);

      const mapped_type&
      at(const _Key& __k) const;
    };
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](_Key&& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(std::move(__k),
                                                    mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }
  template<typename _Key, typename _Pair, typename _Hashtable>
    const typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                             true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    at(const _Key& __k) const
    {
      const _Hashtable* __h = static_cast<const _Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        __throw_out_of_range(__N("_Map_base::at"));
      return (__p->_M_v).second;
    }
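  // Illustrative sketch (not part of the original header): through
  // std::unordered_map, which derives from _Hashtable and hence from this
  // _Map_base specialization, the members above surface as the familiar
  // interface; operator[] default-constructs a missing mapped value, while
  // at() throws std::out_of_range instead:
  //
  //   #include <string>
  //   #include <unordered_map>
  //
  //   void __example_map()
  //   {
  //     std::unordered_map<std::string, int> __m;
  //     __m["gcc"] = 4;           // inserts ("gcc", 0), then assigns 4
  //     int __v = __m.at("gcc");  // 4
  //     // __m.at("clang") would throw std::out_of_range
  //   }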
  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions and reserve iff the rehash policy is _Prime_rehash_policy.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };

  template<typename _Hashtable>
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
    {
      float
      max_load_factor() const
      {
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
        return __this->__rehash_policy().max_load_factor();
      }

      void
      max_load_factor(float __z)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->__rehash_policy(_Prime_rehash_policy(__z));
      }

      void
      reserve(std::size_t __n)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->rehash(__builtin_ceil(__n / max_load_factor()));
      }
    };
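  // Illustrative sketch (not part of the original header): reserve(n) asks
  // for ceil(n / max_load_factor()) buckets, so n elements can then be
  // inserted without triggering a rehash:
  //
  //   #include <unordered_set>
  //
  //   void __example_reserve()
  //   {
  //     std::unordered_set<int> __s;
  //     __s.max_load_factor(0.5f);
  //     __s.reserve(100);          // requests at least 200 buckets
  //   }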
  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.

  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;

  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, false>
    {
    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
                      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _Hash        _M_ranged_hash;
    };
  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.

  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;
  // Specialization: hash function and range-hashing function, no
  // caching of hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, false>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
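  // Illustrative note (not part of the original header): with cached hash
  // codes, _M_compare above short-circuits on the stored code, so the
  // user-supplied _Equal predicate only runs on nodes whose full hash
  // codes already match.  Roughly:
  //
  //   bool __match = __c == __n->_M_hash_code             // cheap integer test
  //                  && _M_eq(__k, _M_extract(__n->_M_v)); // maybe expensive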
  // Class template _Equality_base.  This is for implementing equality
  // comparison for unordered containers, per N3068, by John Lakos and
  // Pablo Halpern.  Algorithmically, we follow closely the reference
  // implementations therein.
  template<typename _ExtractKey, bool __unique_keys,
           typename _Hashtable>
    struct _Equality_base;

  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, true, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;
    };

  template<typename _ExtractKey, typename _Hashtable>
    bool
    _Equality_base<_ExtractKey, true, _Hashtable>::
    _M_equal(const _Hashtable& __other) const
    {
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);

      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
        {
          const auto __ity = __other.find(_ExtractKey()(*__itx));
          if (__ity == __other.end() || *__ity != *__itx)
            return false;
        }
      return true;
    }
  template<typename _ExtractKey, typename _Hashtable>
    struct _Equality_base<_ExtractKey, false, _Hashtable>
    {
      bool _M_equal(const _Hashtable&) const;

    private:
      template<typename _Uiterator>
        static bool
        _S_is_permutation(_Uiterator, _Uiterator, _Uiterator);
    };

  // See std::is_permutation in N3068.
  template<typename _ExtractKey, typename _Hashtable>
    template<typename _Uiterator>
      bool
      _Equality_base<_ExtractKey, false, _Hashtable>::
      _S_is_permutation(_Uiterator __first1, _Uiterator __last1,
                        _Uiterator __first2)
      {
        for (; __first1 != __last1; ++__first1, ++__first2)
          if (!(*__first1 == *__first2))
            break;

        if (__first1 == __last1)
          return true;

        _Uiterator __last2 = __first2;
        std::advance(__last2, std::distance(__first1, __last1));

        for (_Uiterator __it1 = __first1; __it1 != __last1; ++__it1)
          {
            _Uiterator __tmp = __first1;
            while (__tmp != __it1 && !(*__tmp == *__it1))
              ++__tmp;

            // We've seen this one before.
            if (__tmp != __it1)
              continue;

            std::ptrdiff_t __n2 = 0;
            for (__tmp = __first2; __tmp != __last2; ++__tmp)
              if (*__tmp == *__it1)
                ++__n2;

            if (!__n2)
              return false;

            std::ptrdiff_t __n1 = 0;
            for (__tmp = __it1; __tmp != __last1; ++__tmp)
              if (*__tmp == *__it1)
                ++__n1;

            if (__n1 != __n2)
              return false;
          }
        return true;
      }
  template<typename _ExtractKey, typename _Hashtable>
    bool
    _Equality_base<_ExtractKey, false, _Hashtable>::
    _M_equal(const _Hashtable& __other) const
    {
      const _Hashtable* __this = static_cast<const _Hashtable*>(this);

      if (__this->size() != __other.size())
        return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
        {
          const auto __xrange = __this->equal_range(_ExtractKey()(*__itx));
          const auto __yrange = __other.equal_range(_ExtractKey()(*__itx));

          if (std::distance(__xrange.first, __xrange.second)
              != std::distance(__yrange.first, __yrange.second))
            return false;

          if (!_S_is_permutation(__xrange.first,
                                 __xrange.second,
                                 __yrange.first))
            return false;

          __itx = __xrange.second;
        }
      return true;
    }
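  // Illustrative sketch (not part of the original header): the two
  // _M_equal overloads are what operator== on the unordered containers
  // ultimately dispatches to.  For the multi containers, the order inside
  // a group of equivalent keys does not matter:
  //
  //   #include <unordered_set>
  //
  //   bool __example_equal()
  //   {
  //     std::unordered_multiset<int> __a{1, 2, 2, 3};
  //     std::unordered_multiset<int> __b{3, 2, 1, 2};
  //     return __a == __b;   // true: same elements with same multiplicities
  //   }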
} // namespace __detail
} // namespace std

#endif // _HASHTABLE_POLICY_H