// Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-

// Copyright (C) 2010, 2011 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file tr1/hashtable_policy.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly.
 *  @headername{tr1/unordered_map, tr1/unordered_set}
 */

namespace std _GLIBCXX_VISIBILITY(default)
{
namespace tr1
{
namespace __detail
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  // Helper function: return distance(first, last) for forward
  // iterators, or 0 for input iterators.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::input_iterator_tag)
    { return 0; }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::forward_iterator_tag)
    { return std::distance(__first, __last); }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      typedef typename std::iterator_traits<_Iterator>::iterator_category
        _Tag;
      return __distance_fw(__first, __last, _Tag());
    }
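  // A minimal illustration (not part of the library) of the tag dispatch
  // above: a forward-iterator range reports its real length, while an
  // input-iterator range reports 0, so callers only pre-size buckets when
  // the length is cheap to compute.
  //
  //   #include <list>
  //   #include <sstream>
  //   #include <iterator>
  //
  //   std::list<int> __l(10, 1);
  //   __distance_fw(__l.begin(), __l.end());  // 10: list iterators are
  //                                            // forward iterators.
  //   std::istringstream __in("1 2 3");
  //   std::istream_iterator<int> __b(__in), __e;
  //   __distance_fw(__b, __e);                 // 0: istream_iterator is
  //                                            // only an input iterator.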
  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code.  In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;
      std::size_t  _M_hash_code;
      _Hash_node*  _M_next;
    };

  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;
      _Hash_node*  _M_next;
    };
  // Local iterators, used to iterate within a bucket but not between
  // buckets.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>*  _M_cur;
    };
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                value_type;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value*, _Value*>::__type
                                                    pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value&, _Value&>::__type
                                                    reference;
      typedef std::ptrdiff_t                        difference_type;
      typedef std::forward_iterator_tag             iterator_category;

      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int)
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                                value_type;
      typedef const _Value*                         pointer;
      typedef const _Value&                         reference;
      typedef std::ptrdiff_t                        difference_type;
      typedef std::forward_iterator_tag             iterator_category;

      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int)
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;
      _Hash_node<_Value, __cache>**  _M_cur_bucket;
    };
  // Global iterators, used for arbitrary iteration within a hash
  // table.  Larger and more expensive than local iterators.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }
  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                value_type;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value*, _Value*>::__type
                                                    pointer;
      typedef typename
      __gnu_cxx::__conditional_type<__constant_iterators,
                                    const _Value&, _Value&>::__type
                                                    reference;
      typedef std::ptrdiff_t                        difference_type;
      typedef std::forward_iterator_tag             iterator_category;

      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
                          _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_iterator
      operator++(int)
      {
        _Hashtable_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                                value_type;
      typedef const _Value*                         pointer;
      typedef const _Value&                         reference;
      typedef std::ptrdiff_t                        difference_type;
      typedef std::forward_iterator_tag             iterator_category;

      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
                                _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
                                __constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
                                                  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_const_iterator
      operator++(int)
      {
        _Hashtable_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
  // Many of class template _Hashtable's template parameters are policy
  // classes.  These are defaults for the policies.

  // Default range hashing function: use division to fold a large number
  // into the range [0, N).
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    result_type
    operator()(first_argument_type __num, second_argument_type __den) const
    { return __num % __den; }
  };
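  // Illustration only (not part of the library): a range-hashing function
  // folds an arbitrary hash code into a valid bucket index, here by taking
  // the remainder of dividing by the bucket count.
  //
  //   _Mod_range_hashing __mod;
  //   std::size_t __bkt = __mod(1000, 97);   // 1000 % 97 == 30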
  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition.
  struct _Default_ranged_hash { };
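  // A sketch (illustration only) of the composition the tag stands for,
  // assuming H1 = std::tr1::hash<int> and H2 = _Mod_range_hashing:
  //
  //   std::tr1::hash<int> __h1;
  //   _Mod_range_hashing  __h2;
  //   int __k = 42;
  //   std::size_t __nbkt = 11;                      // bucket count
  //   std::size_t __bkt = __h2(__h1(__k), __nbkt);  // h(k, N) = h2(h1(k), N)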
  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    _Prime_rehash_policy(float __z = 1.0)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float                _M_max_load_factor;
    float                _M_growth_factor;
    mutable std::size_t  _M_next_resize;
  };

  extern const unsigned long __prime_list[];
  // XXX This is a hack.  There's no good reason for any of
  // _Prime_rehash_policy's member functions to be inline.

  // Return a prime no smaller than n.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __n);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    const float __min_bkts = __n / _M_max_load_factor;
    const unsigned long* __p = std::lower_bound(__prime_list, __prime_list
                                                + _S_n_primes, __min_bkts);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }
  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            const unsigned long* __p =
              std::lower_bound(__prime_list, __prime_list + _S_n_primes,
                               __min_bkts);
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(*__p * _M_max_load_factor));
            return std::make_pair(true, *__p);
          }
        else
          {
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
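  // A worked example (illustration only), assuming the default max load
  // factor of 1.0 and the growth factor of 2 set by the constructor:
  //
  //   _Prime_rehash_policy __pol(1.0f);
  //   // 193 buckets, 190 elements, about to insert 10 more: 200 elements
  //   // would push the load factor past 1.0, so the policy asks for at
  //   // least max(200, 2 * 193) = 386 buckets, rounded up to the next
  //   // prime in __prime_list.
  //   std::pair<bool, std::size_t> __r = __pol._M_need_rehash(193, 190, 10);
  //   // __r.first == true.  Later single insertions only compare the new
  //   // element count against the cached _M_next_resize and skip the
  //   // floating-point math until that threshold is crossed again.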
  // Base classes for std::tr1::_Hashtable.  We define these base
  // classes because in some cases we want to do different things
  // depending on the value of a policy class.  In some cases the
  // policy class affects which member functions and nested typedefs
  // are defined; we handle that by specializing base class templates.
  // Several of the base class templates need to access other members
  // of class template _Hashtable, so we use the "curiously recurring
  // template pattern" for them.
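  // A minimal sketch (illustration only, hypothetical names) of the
  // curiously recurring template pattern used below: the base class takes
  // the derived class as a template parameter, so its members can downcast
  // `this` and reach the derived class's data without virtual functions.
  //
  //   template<typename _Derived>
  //     struct _Crtp_base
  //     {
  //       int
  //       _M_twice() const
  //       { return 2 * static_cast<const _Derived*>(this)->_M_value; }
  //     };
  //
  //   struct _Concrete : _Crtp_base<_Concrete>
  //   { int _M_value; };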
  // class template _Map_base.  If the hashtable has a value type of the
  // form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);
    };
  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
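  // Illustration only: the specializations above are why a unique-keyed
  // map gets operator[] while a multimap does not.
  //
  //   #include <string>
  //   #include <tr1/unordered_map>
  //
  //   std::tr1::unordered_map<std::string, int> __m;
  //   __m["apple"] = 1;        // OK: default-constructs the mapped value
  //                            // if the key is absent, then returns it.
  //
  //   std::tr1::unordered_multimap<std::string, int> __mm;
  //   // __mm["apple"];        // error: no operator[] (non-unique keys).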
  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions iff the rehash policy is _Prime_rehash_policy.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };

  template<typename _Hashtable>
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
    {
      float
      max_load_factor() const
      {
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
        return __this->__rehash_policy().max_load_factor();
      }

      void
      max_load_factor(float __z)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->__rehash_policy(_Prime_rehash_policy(__z));
      }
    };
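  // Illustration only: the TR1 containers use _Prime_rehash_policy, so the
  // accessors above surface as max_load_factor() on the public containers.
  //
  //   #include <tr1/unordered_set>
  //
  //   std::tr1::unordered_set<int> __s;
  //   float __lf = __s.max_load_factor();   // read the current policy
  //   __s.max_load_factor(0.5f);            // install a stricter policy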
  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.
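  // Illustration only: the two ways a key can reach a bucket index.  With a
  // ranged hash, the user-supplied function does everything at once; with
  // the H1/H2 pair, a size-independent hash code is computed first and then
  // folded into the bucket range, which is what makes caching possible.
  //
  //   // ranged hash:          __bkt = __ranged_hash(__k, __n_buckets);
  //   // hash + range-hashing: __bkt = __h2(__h1(__k), __n_buckets);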
  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;
  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, false>
    {
    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
                      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _Hash        _M_ranged_hash;
    };
  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.

  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;
  // Specialization: hash function and range-hashing function, no
  // caching of hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, false>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
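  // Illustration only: the payoff of the cached code above.  During a
  // rehash, _M_bucket_index(__p, __n) reads __p->_M_hash_code instead of
  // re-hashing the stored key, and _M_compare rejects most non-matching
  // nodes in a bucket with a single integer comparison before touching the
  // key itself (useful for long string keys).
  //
  //   // without cache: __h2(__h1(_M_extract(__p->_M_v)), __n)
  //   // with cache:    __h2(__p->_M_hash_code, __n)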
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace __detail
} // namespace tr1
} // namespace std