libstdc++
hashtable_policy.h
Go to the documentation of this file.
1// Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
3// Copyright (C) 2010-2024 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
29 */
30
31#ifndef _HASHTABLE_POLICY_H
32#define _HASHTABLE_POLICY_H 1
33
34#include <tuple> // for std::tuple, std::forward_as_tuple
35#include <bits/functional_hash.h> // for __is_fast_hash
36#include <bits/stl_algobase.h> // for std::min, std::is_permutation.
37#include <bits/stl_pair.h> // for std::pair
38#include <ext/aligned_buffer.h> // for __gnu_cxx::__aligned_buffer
39#include <ext/alloc_traits.h> // for std::__alloc_rebind
40#include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
41
42namespace std _GLIBCXX_VISIBILITY(default)
43{
44_GLIBCXX_BEGIN_NAMESPACE_VERSION
45/// @cond undocumented
46
47 template<typename _Key, typename _Value, typename _Alloc,
48 typename _ExtractKey, typename _Equal,
49 typename _Hash, typename _RangeHash, typename _Unused,
50 typename _RehashPolicy, typename _Traits>
51 class _Hashtable;
52
53namespace __detail
54{
55 /**
56 * @defgroup hashtable-detail Base and Implementation Classes
57 * @ingroup unordered_associative_containers
58 * @{
59 */
60 template<typename _Key, typename _Value, typename _ExtractKey,
61 typename _Equal, typename _Hash, typename _RangeHash,
62 typename _Unused, typename _Traits>
63 struct _Hashtable_base;
64
65 // Helper function: return distance(first, last) for forward
66 // iterators, or 0/1 for input iterators.
67 template<typename _Iterator>
69 __distance_fw(_Iterator __first, _Iterator __last,
71 { return __first != __last ? 1 : 0; }
72
73 template<typename _Iterator>
75 __distance_fw(_Iterator __first, _Iterator __last,
77 { return std::distance(__first, __last); }
78
79 template<typename _Iterator>
81 __distance_fw(_Iterator __first, _Iterator __last)
82 { return __distance_fw(__first, __last,
83 std::__iterator_category(__first)); }
84
85 struct _Identity
86 {
87 template<typename _Tp>
88 _Tp&&
89 operator()(_Tp&& __x) const noexcept
90 { return std::forward<_Tp>(__x); }
91 };
92
93 struct _Select1st
94 {
95 template<typename _Pair>
96 struct __1st_type;
97
98 template<typename _Tp, typename _Up>
99 struct __1st_type<pair<_Tp, _Up>>
100 { using type = _Tp; };
101
102 template<typename _Tp, typename _Up>
103 struct __1st_type<const pair<_Tp, _Up>>
104 { using type = const _Tp; };
105
106 template<typename _Pair>
107 struct __1st_type<_Pair&>
108 { using type = typename __1st_type<_Pair>::type&; };
109
110 template<typename _Tp>
111 typename __1st_type<_Tp>::type&&
112 operator()(_Tp&& __x) const noexcept
113 { return std::forward<_Tp>(__x).first; }
114 };
115
116 template<typename _ExKey>
117 struct _NodeBuilder;
118
119 template<>
120 struct _NodeBuilder<_Select1st>
121 {
122 template<typename _Kt, typename _Arg, typename _NodeGenerator>
123 static auto
124 _S_build(_Kt&& __k, _Arg&& __arg, const _NodeGenerator& __node_gen)
125 -> typename _NodeGenerator::__node_ptr
126 {
127 return __node_gen(std::forward<_Kt>(__k),
128 std::forward<_Arg>(__arg).second);
129 }
130 };
131
132 template<>
133 struct _NodeBuilder<_Identity>
134 {
135 template<typename _Kt, typename _Arg, typename _NodeGenerator>
136 static auto
137 _S_build(_Kt&& __k, _Arg&&, const _NodeGenerator& __node_gen)
138 -> typename _NodeGenerator::__node_ptr
139 { return __node_gen(std::forward<_Kt>(__k)); }
140 };
141
142 template<typename _HashtableAlloc, typename _NodePtr>
143 struct _NodePtrGuard
144 {
145 _HashtableAlloc& _M_h;
146 _NodePtr _M_ptr;
147
148 ~_NodePtrGuard()
149 {
150 if (_M_ptr)
151 _M_h._M_deallocate_node_ptr(_M_ptr);
152 }
153 };
154
155 template<typename _NodeAlloc>
156 struct _Hashtable_alloc;
157
158 // Functor recycling a pool of nodes and using allocation once the pool is
159 // empty.
160 template<typename _NodeAlloc>
161 struct _ReuseOrAllocNode
162 {
163 private:
164 using __node_alloc_type = _NodeAlloc;
165 using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
166 using __node_alloc_traits =
167 typename __hashtable_alloc::__node_alloc_traits;
168
169 public:
170 using __node_ptr = typename __hashtable_alloc::__node_ptr;
171
172 _ReuseOrAllocNode(__node_ptr __nodes, __hashtable_alloc& __h)
173 : _M_nodes(__nodes), _M_h(__h) { }
174 _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;
175
176 ~_ReuseOrAllocNode()
177 { _M_h._M_deallocate_nodes(_M_nodes); }
178
179 template<typename... _Args>
180 __node_ptr
181 operator()(_Args&&... __args) const
182 {
183 if (!_M_nodes)
184 return _M_h._M_allocate_node(std::forward<_Args>(__args)...);
185
186 __node_ptr __node = _M_nodes;
187 _M_nodes = _M_nodes->_M_next();
188 __node->_M_nxt = nullptr;
189 auto& __a = _M_h._M_node_allocator();
190 __node_alloc_traits::destroy(__a, __node->_M_valptr());
191 _NodePtrGuard<__hashtable_alloc, __node_ptr> __guard { _M_h, __node };
192 __node_alloc_traits::construct(__a, __node->_M_valptr(),
193 std::forward<_Args>(__args)...);
194 __guard._M_ptr = nullptr;
195 return __node;
196 }
197
198 private:
199 mutable __node_ptr _M_nodes;
200 __hashtable_alloc& _M_h;
201 };
202
203 // Functor similar to the previous one but without any pool of nodes to
204 // recycle.
205 template<typename _NodeAlloc>
206 struct _AllocNode
207 {
208 private:
209 using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
210
211 public:
212 using __node_ptr = typename __hashtable_alloc::__node_ptr;
213
214 _AllocNode(__hashtable_alloc& __h)
215 : _M_h(__h) { }
216
217 template<typename... _Args>
218 __node_ptr
219 operator()(_Args&&... __args) const
220 { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }
221
222 private:
223 __hashtable_alloc& _M_h;
224 };
225
226 // Auxiliary types used for all instantiations of _Hashtable nodes
227 // and iterators.
228
229 /**
230 * struct _Hashtable_traits
231 *
232 * Important traits for hash tables.
233 *
234 * @tparam _Cache_hash_code Boolean value. True if the value of
235 * the hash function is stored along with the value. This is a
236 * time-space tradeoff. Storing it may improve lookup speed by
237 * reducing the number of times we need to call the _Hash or _Equal
238 * functors.
239 *
240 * @tparam _Constant_iterators Boolean value. True if iterator and
241 * const_iterator are both constant iterator types. This is true
242 * for unordered_set and unordered_multiset, false for
243 * unordered_map and unordered_multimap.
244 *
245 * @tparam _Unique_keys Boolean value. True if the return value
246 * of _Hashtable::count(k) is always at most one, false if it may
247 * be an arbitrary number. This is true for unordered_set and
248 * unordered_map, false for unordered_multiset and
249 * unordered_multimap.
250 */
251 template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
252 struct _Hashtable_traits
253 {
254 using __hash_cached = __bool_constant<_Cache_hash_code>;
255 using __constant_iterators = __bool_constant<_Constant_iterators>;
256 using __unique_keys = __bool_constant<_Unique_keys>;
257 };
258
259 /**
260 * struct _Hashtable_hash_traits
261 *
262 * Important traits for hash tables depending on associated hasher.
263 *
264 */
265 template<typename _Hash>
266 struct _Hashtable_hash_traits
267 {
268 static constexpr std::size_t
269 __small_size_threshold() noexcept
270 { return std::__is_fast_hash<_Hash>::value ? 0 : 20; }
271 };
272
273 /**
274 * struct _Hash_node_base
275 *
276 * Nodes, used to wrap elements stored in the hash table. A policy
277 * template parameter of class template _Hashtable controls whether
278 * nodes also store a hash code. In some cases (e.g. strings) this
279 * may be a performance win.
280 */
281 struct _Hash_node_base
282 {
283 _Hash_node_base* _M_nxt;
284
285 _Hash_node_base() noexcept : _M_nxt() { }
286
287 _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
288 };
289
290 /**
291 * struct _Hash_node_value_base
292 *
293 * Node type with the value to store.
294 */
295 template<typename _Value>
296 struct _Hash_node_value_base
297 {
298 typedef _Value value_type;
299
300 __gnu_cxx::__aligned_buffer<_Value> _M_storage;
301
302 [[__gnu__::__always_inline__]]
303 _Value*
304 _M_valptr() noexcept
305 { return _M_storage._M_ptr(); }
306
307 [[__gnu__::__always_inline__]]
308 const _Value*
309 _M_valptr() const noexcept
310 { return _M_storage._M_ptr(); }
311
312 [[__gnu__::__always_inline__]]
313 _Value&
314 _M_v() noexcept
315 { return *_M_valptr(); }
316
317 [[__gnu__::__always_inline__]]
318 const _Value&
319 _M_v() const noexcept
320 { return *_M_valptr(); }
321 };
322
323 /**
324 * Primary template struct _Hash_node_code_cache.
325 */
326 template<bool _Cache_hash_code>
327 struct _Hash_node_code_cache
328 { };
329
330 /**
331 * Specialization for node with cache, struct _Hash_node_code_cache.
332 */
333 template<>
334 struct _Hash_node_code_cache<true>
335 { std::size_t _M_hash_code; };
336
337 template<typename _Value, bool _Cache_hash_code>
338 struct _Hash_node_value
339 : _Hash_node_value_base<_Value>
340 , _Hash_node_code_cache<_Cache_hash_code>
341 { };
342
343 /**
344 * Primary template struct _Hash_node.
345 */
346 template<typename _Value, bool _Cache_hash_code>
347 struct _Hash_node
348 : _Hash_node_base
349 , _Hash_node_value<_Value, _Cache_hash_code>
350 {
351 _Hash_node*
352 _M_next() const noexcept
353 { return static_cast<_Hash_node*>(this->_M_nxt); }
354 };
355
356 /// Base class for node iterators.
357 template<typename _Value, bool _Cache_hash_code>
358 struct _Node_iterator_base
359 {
360 using __node_type = _Hash_node<_Value, _Cache_hash_code>;
361
362 __node_type* _M_cur;
363
364 _Node_iterator_base() : _M_cur(nullptr) { }
365 _Node_iterator_base(__node_type* __p) noexcept
366 : _M_cur(__p) { }
367
368 void
369 _M_incr() noexcept
370 { _M_cur = _M_cur->_M_next(); }
371
372 friend bool
373 operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
374 noexcept
375 { return __x._M_cur == __y._M_cur; }
376
377#if __cpp_impl_three_way_comparison < 201907L
378 friend bool
379 operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
380 noexcept
381 { return __x._M_cur != __y._M_cur; }
382#endif
383 };
384
385 /// Node iterators, used to iterate through all the hashtable.
386 template<typename _Value, bool __constant_iterators, bool __cache>
387 struct _Node_iterator
388 : public _Node_iterator_base<_Value, __cache>
389 {
390 private:
391 using __base_type = _Node_iterator_base<_Value, __cache>;
392 using __node_type = typename __base_type::__node_type;
393
394 public:
395 using value_type = _Value;
396 using difference_type = std::ptrdiff_t;
397 using iterator_category = std::forward_iterator_tag;
398
399 using pointer = __conditional_t<__constant_iterators,
400 const value_type*, value_type*>;
401
402 using reference = __conditional_t<__constant_iterators,
403 const value_type&, value_type&>;
404
405 _Node_iterator() = default;
406
407 explicit
408 _Node_iterator(__node_type* __p) noexcept
409 : __base_type(__p) { }
410
411 reference
412 operator*() const noexcept
413 { return this->_M_cur->_M_v(); }
414
415 pointer
416 operator->() const noexcept
417 { return this->_M_cur->_M_valptr(); }
418
419 _Node_iterator&
420 operator++() noexcept
421 {
422 this->_M_incr();
423 return *this;
424 }
425
426 _Node_iterator
427 operator++(int) noexcept
428 {
429 _Node_iterator __tmp(*this);
430 this->_M_incr();
431 return __tmp;
432 }
433
434#if __cpp_impl_three_way_comparison >= 201907L
435 friend bool
436 operator==(const _Node_iterator&, const _Node_iterator&) = default;
437#else
438 friend bool
439 operator==(const _Node_iterator& __x, const _Node_iterator& __y) noexcept
440 {
441 const __base_type& __bx = __x;
442 const __base_type& __by = __y;
443 return __bx == __by;
444 }
445
446 friend bool
447 operator!=(const _Node_iterator& __x, const _Node_iterator& __y) noexcept
448 { return !(__x == __y); }
449#endif
450 };
451
452 /// Node const_iterators, used to iterate through all the hashtable.
453 template<typename _Value, bool __constant_iterators, bool __cache>
454 struct _Node_const_iterator
455 : public _Node_iterator_base<_Value, __cache>
456 {
457 private:
458 using __base_type = _Node_iterator_base<_Value, __cache>;
459 using __node_type = typename __base_type::__node_type;
460
461 // The corresponding non-const iterator.
462 using __iterator
463 = _Node_iterator<_Value, __constant_iterators, __cache>;
464
465 public:
466 typedef _Value value_type;
467 typedef std::ptrdiff_t difference_type;
468 typedef std::forward_iterator_tag iterator_category;
469
470 typedef const value_type* pointer;
471 typedef const value_type& reference;
472
473 _Node_const_iterator() = default;
474
475 explicit
476 _Node_const_iterator(__node_type* __p) noexcept
477 : __base_type(__p) { }
478
479 _Node_const_iterator(const __iterator& __x) noexcept
480 : __base_type(__x._M_cur) { }
481
482 reference
483 operator*() const noexcept
484 { return this->_M_cur->_M_v(); }
485
486 pointer
487 operator->() const noexcept
488 { return this->_M_cur->_M_valptr(); }
489
490 _Node_const_iterator&
491 operator++() noexcept
492 {
493 this->_M_incr();
494 return *this;
495 }
496
497 _Node_const_iterator
498 operator++(int) noexcept
499 {
500 _Node_const_iterator __tmp(*this);
501 this->_M_incr();
502 return __tmp;
503 }
504
505#if __cpp_impl_three_way_comparison >= 201907L
506 friend bool
507 operator==(const _Node_const_iterator&,
508 const _Node_const_iterator&) = default;
509
510 friend bool
511 operator==(const _Node_const_iterator& __x, const __iterator& __y)
512 {
513 const __base_type& __bx = __x;
514 const __base_type& __by = __y;
515 return __bx == __by;
516 }
517#else
518 friend bool
519 operator==(const _Node_const_iterator& __x,
520 const _Node_const_iterator& __y) noexcept
521 {
522 const __base_type& __bx = __x;
523 const __base_type& __by = __y;
524 return __bx == __by;
525 }
526
527 friend bool
528 operator!=(const _Node_const_iterator& __x,
529 const _Node_const_iterator& __y) noexcept
530 { return !(__x == __y); }
531
532 friend bool
533 operator==(const _Node_const_iterator& __x,
534 const __iterator& __y) noexcept
535 {
536 const __base_type& __bx = __x;
537 const __base_type& __by = __y;
538 return __bx == __by;
539 }
540
541 friend bool
542 operator!=(const _Node_const_iterator& __x,
543 const __iterator& __y) noexcept
544 { return !(__x == __y); }
545
546 friend bool
547 operator==(const __iterator& __x,
548 const _Node_const_iterator& __y) noexcept
549 {
550 const __base_type& __bx = __x;
551 const __base_type& __by = __y;
552 return __bx == __by;
553 }
554
555 friend bool
556 operator!=(const __iterator& __x,
557 const _Node_const_iterator& __y) noexcept
558 { return !(__x == __y); }
559#endif
560 };
561
562 // Many of class template _Hashtable's template parameters are policy
563 // classes. These are defaults for the policies.
564
565 /// Default range hashing function: use division to fold a large number
566 /// into the range [0, N).
567 struct _Mod_range_hashing
568 {
569 typedef std::size_t first_argument_type;
570 typedef std::size_t second_argument_type;
571 typedef std::size_t result_type;
572
573 result_type
574 operator()(first_argument_type __num,
575 second_argument_type __den) const noexcept
576 { return __num % __den; }
577 };
578
579 /// Default ranged hash function H. In principle it should be a
580 /// function object composed from objects of type H1 and H2 such that
581 /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
582 /// h1 and h2. So instead we'll just use a tag to tell class template
583 /// hashtable to do that composition.
584 struct _Default_ranged_hash { };
585
586 /// Default value for rehash policy. Bucket size is (usually) the
587 /// smallest prime that keeps the load factor small enough.
588 struct _Prime_rehash_policy
589 {
590 using __has_load_factor = true_type;
591
592 _Prime_rehash_policy(float __z = 1.0) noexcept
593 : _M_max_load_factor(__z), _M_next_resize(0) { }
594
595 float
596 max_load_factor() const noexcept
597 { return _M_max_load_factor; }
598
599 // Return a bucket size no smaller than n.
600 std::size_t
601 _M_next_bkt(std::size_t __n) const;
602
603 // Return a bucket count appropriate for n elements
604 std::size_t
605 _M_bkt_for_elements(std::size_t __n) const
606 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
607
608 // __n_bkt is current bucket count, __n_elt is current element count,
609 // and __n_ins is number of elements to be inserted. Do we need to
610 // increase bucket count? If so, return make_pair(true, n), where n
611 // is the new bucket count. If not, return make_pair(false, 0).
613 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
614 std::size_t __n_ins) const;
615
616 typedef std::size_t _State;
617
618 _State
619 _M_state() const
620 { return _M_next_resize; }
621
622 void
623 _M_reset() noexcept
624 { _M_next_resize = 0; }
625
626 void
627 _M_reset(_State __state)
628 { _M_next_resize = __state; }
629
630 static const std::size_t _S_growth_factor = 2;
631
632 float _M_max_load_factor;
633 mutable std::size_t _M_next_resize;
634 };
635
636 /// Range hashing function assuming that second arg is a power of 2.
637 struct _Mask_range_hashing
638 {
639 typedef std::size_t first_argument_type;
640 typedef std::size_t second_argument_type;
641 typedef std::size_t result_type;
642
643 result_type
644 operator()(first_argument_type __num,
645 second_argument_type __den) const noexcept
646 { return __num & (__den - 1); }
647 };
648
649 /// Compute closest power of 2 not less than __n
650 inline std::size_t
651 __clp2(std::size_t __n) noexcept
652 {
654 // Equivalent to return __n ? std::bit_ceil(__n) : 0;
655 if (__n < 2)
656 return __n;
657 const unsigned __lz = sizeof(size_t) > sizeof(long)
658 ? __builtin_clzll(__n - 1ull)
659 : __builtin_clzl(__n - 1ul);
660 // Doing two shifts avoids undefined behaviour when __lz == 0.
661 return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
662 }
663
664 /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
665 /// operations.
666 struct _Power2_rehash_policy
667 {
668 using __has_load_factor = true_type;
669
670 _Power2_rehash_policy(float __z = 1.0) noexcept
671 : _M_max_load_factor(__z), _M_next_resize(0) { }
672
673 float
674 max_load_factor() const noexcept
675 { return _M_max_load_factor; }
676
677 // Return a bucket size no smaller than n (as long as n is not above the
678 // highest power of 2).
679 std::size_t
680 _M_next_bkt(std::size_t __n) noexcept
681 {
682 if (__n == 0)
683 // Special case on container 1st initialization with 0 bucket count
684 // hint. We keep _M_next_resize to 0 to make sure that next time we
685 // want to add an element allocation will take place.
686 return 1;
687
688 const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
689 const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
690 std::size_t __res = __clp2(__n);
691
692 if (__res == 0)
693 __res = __max_bkt;
694 else if (__res == 1)
695 // If __res is 1 we force it to 2 to make sure there will be an
696 // allocation so that nothing need to be stored in the initial
697 // single bucket
698 __res = 2;
699
700 if (__res == __max_bkt)
701 // Set next resize to the max value so that we never try to rehash again
702 // as we already reach the biggest possible bucket number.
703 // Note that it might result in max_load_factor not being respected.
704 _M_next_resize = size_t(-1);
705 else
706 _M_next_resize
707 = __builtin_floor(__res * (double)_M_max_load_factor);
708
709 return __res;
710 }
711
712 // Return a bucket count appropriate for n elements
713 std::size_t
714 _M_bkt_for_elements(std::size_t __n) const noexcept
715 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
716
717 // __n_bkt is current bucket count, __n_elt is current element count,
718 // and __n_ins is number of elements to be inserted. Do we need to
719 // increase bucket count? If so, return make_pair(true, n), where n
720 // is the new bucket count. If not, return make_pair(false, 0).
722 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
723 std::size_t __n_ins) noexcept
724 {
725 if (__n_elt + __n_ins > _M_next_resize)
726 {
727 // If _M_next_resize is 0 it means that we have nothing allocated so
728 // far and that we start inserting elements. In this case we start
729 // with an initial bucket size of 11.
730 double __min_bkts
731 = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
732 / (double)_M_max_load_factor;
733 if (__min_bkts >= __n_bkt)
734 return { true,
735 _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
736 __n_bkt * _S_growth_factor)) };
737
738 _M_next_resize
739 = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
740 return { false, 0 };
741 }
742 else
743 return { false, 0 };
744 }
745
746 typedef std::size_t _State;
747
748 _State
749 _M_state() const noexcept
750 { return _M_next_resize; }
751
752 void
753 _M_reset() noexcept
754 { _M_next_resize = 0; }
755
756 void
757 _M_reset(_State __state) noexcept
758 { _M_next_resize = __state; }
759
760 static const std::size_t _S_growth_factor = 2;
761
762 float _M_max_load_factor;
763 std::size_t _M_next_resize;
764 };
765
766 template<typename _RehashPolicy>
767 struct _RehashStateGuard
768 {
769 _RehashPolicy* _M_guarded_obj;
770 typename _RehashPolicy::_State _M_prev_state;
771
772 _RehashStateGuard(_RehashPolicy& __policy)
773 : _M_guarded_obj(std::__addressof(__policy))
774 , _M_prev_state(__policy._M_state())
775 { }
776 _RehashStateGuard(const _RehashStateGuard&) = delete;
777
778 ~_RehashStateGuard()
779 {
780 if (_M_guarded_obj)
781 _M_guarded_obj->_M_reset(_M_prev_state);
782 }
783 };
784
785 // Base classes for std::_Hashtable. We define these base classes
786 // because in some cases we want to do different things depending on
787 // the value of a policy class. In some cases the policy class
788 // affects which member functions and nested typedefs are defined;
789 // we handle that by specializing base class templates. Several of
790 // the base class templates need to access other members of class
791 // template _Hashtable, so we use a variant of the "Curiously
792 // Recurring Template Pattern" (CRTP) technique.
793
794 /**
795 * Primary class template _Map_base.
796 *
797 * If the hashtable has a value type of the form pair<const T1, T2> and
798 * a key extraction policy (_ExtractKey) that returns the first part
799 * of the pair, the hashtable gets a mapped_type typedef. If it
800 * satisfies those criteria and also has unique keys, then it also
801 * gets an operator[].
802 */
803 template<typename _Key, typename _Value, typename _Alloc,
804 typename _ExtractKey, typename _Equal,
805 typename _Hash, typename _RangeHash, typename _Unused,
806 typename _RehashPolicy, typename _Traits,
807 bool _Unique_keys = _Traits::__unique_keys::value>
808 struct _Map_base { };
809
810 /// Partial specialization, __unique_keys set to false, std::pair value type.
811 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
812 typename _Hash, typename _RangeHash, typename _Unused,
813 typename _RehashPolicy, typename _Traits>
814 struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
815 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
816 {
817 using mapped_type = _Val;
818 };
819
820 /// Partial specialization, __unique_keys set to true.
821 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
822 typename _Hash, typename _RangeHash, typename _Unused,
823 typename _RehashPolicy, typename _Traits>
824 struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
825 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
826 {
827 private:
828 using __hashtable_base = _Hashtable_base<_Key, pair<const _Key, _Val>,
829 _Select1st, _Equal, _Hash,
830 _RangeHash, _Unused,
831 _Traits>;
832
833 using __hashtable = _Hashtable<_Key, pair<const _Key, _Val>, _Alloc,
834 _Select1st, _Equal, _Hash, _RangeHash,
835 _Unused, _RehashPolicy, _Traits>;
836
837 using __hash_code = typename __hashtable_base::__hash_code;
838
839 public:
840 using key_type = typename __hashtable_base::key_type;
841 using mapped_type = _Val;
842
843 mapped_type&
844 operator[](const key_type& __k);
845
846 mapped_type&
847 operator[](key_type&& __k);
848
849 // _GLIBCXX_RESOLVE_LIB_DEFECTS
850 // DR 761. unordered_map needs an at() member function.
851 mapped_type&
852 at(const key_type& __k)
853 {
854 auto __ite = static_cast<__hashtable*>(this)->find(__k);
855 if (!__ite._M_cur)
856 __throw_out_of_range(__N("unordered_map::at"));
857 return __ite->second;
858 }
859
860 const mapped_type&
861 at(const key_type& __k) const
862 {
863 auto __ite = static_cast<const __hashtable*>(this)->find(__k);
864 if (!__ite._M_cur)
865 __throw_out_of_range(__N("unordered_map::at"));
866 return __ite->second;
867 }
868 };
869
870 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
871 typename _Hash, typename _RangeHash, typename _Unused,
872 typename _RehashPolicy, typename _Traits>
873 auto
874 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
875 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
876 operator[](const key_type& __k)
877 -> mapped_type&
878 {
879 __hashtable* __h = static_cast<__hashtable*>(this);
880 __hash_code __code = __h->_M_hash_code(__k);
881 std::size_t __bkt = __h->_M_bucket_index(__code);
882 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
883 return __node->_M_v().second;
884
885 typename __hashtable::_Scoped_node __node {
886 __h,
890 };
891 auto __pos
892 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
893 __node._M_node = nullptr;
894 return __pos->second;
895 }
896
897 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
898 typename _Hash, typename _RangeHash, typename _Unused,
899 typename _RehashPolicy, typename _Traits>
900 auto
901 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
902 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
903 operator[](key_type&& __k)
904 -> mapped_type&
905 {
906 __hashtable* __h = static_cast<__hashtable*>(this);
907 __hash_code __code = __h->_M_hash_code(__k);
908 std::size_t __bkt = __h->_M_bucket_index(__code);
909 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
910 return __node->_M_v().second;
911
912 typename __hashtable::_Scoped_node __node {
913 __h,
917 };
918 auto __pos
919 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
920 __node._M_node = nullptr;
921 return __pos->second;
922 }
923
924 // Partial specialization for unordered_map<const T, U>, see PR 104174.
925 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
926 typename _Hash, typename _RangeHash, typename _Unused,
927 typename _RehashPolicy, typename _Traits, bool __uniq>
928 struct _Map_base<const _Key, pair<const _Key, _Val>,
929 _Alloc, _Select1st, _Equal, _Hash,
930 _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
931 : _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal, _Hash,
932 _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
933 { };
934
935 /**
936 * Primary class template _Insert_base.
937 *
938 * Defines @c insert member functions appropriate to all _Hashtables.
939 */
940 template<typename _Key, typename _Value, typename _Alloc,
941 typename _ExtractKey, typename _Equal,
942 typename _Hash, typename _RangeHash, typename _Unused,
943 typename _RehashPolicy, typename _Traits>
944 struct _Insert_base
945 {
946 protected:
947 using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
948 _Equal, _Hash, _RangeHash,
949 _Unused, _Traits>;
950
951 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
952 _Hash, _RangeHash,
953 _Unused, _RehashPolicy, _Traits>;
954
955 using __hash_cached = typename _Traits::__hash_cached;
956 using __constant_iterators = typename _Traits::__constant_iterators;
957
958 using __hashtable_alloc = _Hashtable_alloc<
959 __alloc_rebind<_Alloc, _Hash_node<_Value,
960 __hash_cached::value>>>;
961
962 using value_type = typename __hashtable_base::value_type;
963 using size_type = typename __hashtable_base::size_type;
964
965 using __unique_keys = typename _Traits::__unique_keys;
966 using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
967 using __node_gen_type = _AllocNode<__node_alloc_type>;
968
969 __hashtable&
970 _M_conjure_hashtable()
971 { return *(static_cast<__hashtable*>(this)); }
972
973 template<typename _InputIterator, typename _NodeGetter>
974 void
975 _M_insert_range(_InputIterator __first, _InputIterator __last,
976 const _NodeGetter&, true_type __uks);
977
978 template<typename _InputIterator, typename _NodeGetter>
979 void
980 _M_insert_range(_InputIterator __first, _InputIterator __last,
981 const _NodeGetter&, false_type __uks);
982
983 public:
984 using iterator = _Node_iterator<_Value, __constant_iterators::value,
985 __hash_cached::value>;
986
987 using const_iterator = _Node_const_iterator<_Value,
988 __constant_iterators::value,
989 __hash_cached::value>;
990
991 using __ireturn_type = __conditional_t<__unique_keys::value,
993 iterator>;
994
995 __ireturn_type
996 insert(const value_type& __v)
997 {
998 __hashtable& __h = _M_conjure_hashtable();
999 __node_gen_type __node_gen(__h);
1000 return __h._M_insert(__v, __node_gen, __unique_keys{});
1001 }
1002
1003 iterator
1004 insert(const_iterator __hint, const value_type& __v)
1005 {
1006 __hashtable& __h = _M_conjure_hashtable();
1007 __node_gen_type __node_gen(__h);
1008 return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
1009 }
1010
1011#ifdef __glibcxx_unordered_map_try_emplace // C++ >= 17 && HOSTED
1012 template<typename _KType, typename... _Args>
1014 try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
1015 {
1016 __hashtable& __h = _M_conjure_hashtable();
1017 auto __code = __h._M_hash_code(__k);
1018 std::size_t __bkt = __h._M_bucket_index(__code);
1019 if (auto __node = __h._M_find_node(__bkt, __k, __code))
1020 return { iterator(__node), false };
1021
1022 typename __hashtable::_Scoped_node __node {
1023 &__h,
1027 };
1028 auto __it
1029 = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
1030 __node._M_node = nullptr;
1031 return { __it, true };
1032 }
1033#endif
1034
1035 void
1036 insert(initializer_list<value_type> __l)
1037 { this->insert(__l.begin(), __l.end()); }
1038
1039 template<typename _InputIterator>
1040 void
1041 insert(_InputIterator __first, _InputIterator __last)
1042 {
1043 __hashtable& __h = _M_conjure_hashtable();
1044 __node_gen_type __node_gen(__h);
1045 return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
1046 }
1047 };
1048
1049 template<typename _Key, typename _Value, typename _Alloc,
1050 typename _ExtractKey, typename _Equal,
1051 typename _Hash, typename _RangeHash, typename _Unused,
1052 typename _RehashPolicy, typename _Traits>
1053 template<typename _InputIterator, typename _NodeGetter>
1054 void
1055 _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1056 _Hash, _RangeHash, _Unused,
1057 _RehashPolicy, _Traits>::
1058 _M_insert_range(_InputIterator __first, _InputIterator __last,
1059 const _NodeGetter& __node_gen, true_type __uks)
1060 {
1061 __hashtable& __h = _M_conjure_hashtable();
1062 for (; __first != __last; ++__first)
1063 __h._M_insert(*__first, __node_gen, __uks);
1064 }
1065
1066 template<typename _Key, typename _Value, typename _Alloc,
1067 typename _ExtractKey, typename _Equal,
1068 typename _Hash, typename _RangeHash, typename _Unused,
1069 typename _RehashPolicy, typename _Traits>
1070 template<typename _InputIterator, typename _NodeGetter>
1071 void
1072 _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1073 _Hash, _RangeHash, _Unused,
1074 _RehashPolicy, _Traits>::
1075 _M_insert_range(_InputIterator __first, _InputIterator __last,
1076 const _NodeGetter& __node_gen, false_type __uks)
1077 {
1078 using __rehash_guard_t = typename __hashtable::__rehash_guard_t;
1079 using __pair_type = std::pair<bool, std::size_t>;
1080
1081 size_type __n_elt = __detail::__distance_fw(__first, __last);
1082 if (__n_elt == 0)
1083 return;
1084
1085 __hashtable& __h = _M_conjure_hashtable();
1086 __rehash_guard_t __rehash_guard(__h._M_rehash_policy);
1087 __pair_type __do_rehash
1088 = __h._M_rehash_policy._M_need_rehash(__h._M_bucket_count,
1089 __h._M_element_count,
1090 __n_elt);
1091
1092 if (__do_rehash.first)
1093 __h._M_rehash(__do_rehash.second, __uks);
1094
1095 __rehash_guard._M_guarded_obj = nullptr;
1096 for (; __first != __last; ++__first)
1097 __h._M_insert(*__first, __node_gen, __uks);
1098 }
1099
1100 /**
1101 * Primary class template _Insert.
1102 *
1103 * Defines @c insert member functions that depend on _Hashtable policies,
1104 * via partial specializations.
1105 */
1106 template<typename _Key, typename _Value, typename _Alloc,
1107 typename _ExtractKey, typename _Equal,
1108 typename _Hash, typename _RangeHash, typename _Unused,
1109 typename _RehashPolicy, typename _Traits,
1110 bool _Constant_iterators = _Traits::__constant_iterators::value>
1111 struct _Insert;
1112
1113 /// Specialization.
1114 template<typename _Key, typename _Value, typename _Alloc,
1115 typename _ExtractKey, typename _Equal,
1116 typename _Hash, typename _RangeHash, typename _Unused,
1117 typename _RehashPolicy, typename _Traits>
1118 struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1119 _Hash, _RangeHash, _Unused,
1120 _RehashPolicy, _Traits, true>
1121 : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1122 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1123 {
1124 using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1125 _Equal, _Hash, _RangeHash, _Unused,
1126 _RehashPolicy, _Traits>;
1127
1128 using value_type = typename __base_type::value_type;
1129 using iterator = typename __base_type::iterator;
1130 using const_iterator = typename __base_type::const_iterator;
1131 using __ireturn_type = typename __base_type::__ireturn_type;
1132
1133 using __unique_keys = typename __base_type::__unique_keys;
1134 using __hashtable = typename __base_type::__hashtable;
1135 using __node_gen_type = typename __base_type::__node_gen_type;
1136
1137 using __base_type::insert;
1138
1139 __ireturn_type
1140 insert(value_type&& __v)
1141 {
1142 __hashtable& __h = this->_M_conjure_hashtable();
1143 __node_gen_type __node_gen(__h);
1144 return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
1145 }
1146
1147 iterator
1148 insert(const_iterator __hint, value_type&& __v)
1149 {
1150 __hashtable& __h = this->_M_conjure_hashtable();
1151 __node_gen_type __node_gen(__h);
1152 return __h._M_insert(__hint, std::move(__v), __node_gen,
1153 __unique_keys{});
1154 }
1155 };
1156
1157 /// Specialization.
1158 template<typename _Key, typename _Value, typename _Alloc,
1159 typename _ExtractKey, typename _Equal,
1160 typename _Hash, typename _RangeHash, typename _Unused,
1161 typename _RehashPolicy, typename _Traits>
1162 struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1163 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1164 : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1165 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1166 {
1167 using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1168 _Equal, _Hash, _RangeHash, _Unused,
1169 _RehashPolicy, _Traits>;
1170 using value_type = typename __base_type::value_type;
1171 using iterator = typename __base_type::iterator;
1172 using const_iterator = typename __base_type::const_iterator;
1173
1174 using __unique_keys = typename __base_type::__unique_keys;
1175 using __hashtable = typename __base_type::__hashtable;
1176 using __ireturn_type = typename __base_type::__ireturn_type;
1177
1178 using __base_type::insert;
1179
1180 template<typename _Pair>
1182
1183 template<typename _Pair>
1185
1186 template<typename _Pair>
1187 using _IFconsp = typename _IFcons<_Pair>::type;
1188
1189 template<typename _Pair, typename = _IFconsp<_Pair>>
1190 __ireturn_type
1191 insert(_Pair&& __v)
1192 {
1193 __hashtable& __h = this->_M_conjure_hashtable();
1194 return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
1195 }
1196
1197 template<typename _Pair, typename = _IFconsp<_Pair>>
1198 iterator
1199 insert(const_iterator __hint, _Pair&& __v)
1200 {
1201 __hashtable& __h = this->_M_conjure_hashtable();
1202 return __h._M_emplace(__hint, __unique_keys{},
1203 std::forward<_Pair>(__v));
1204 }
1205 };
1206
1207 template<typename _Policy>
1208 using __has_load_factor = typename _Policy::__has_load_factor;
1209
1210 /**
1211 * Primary class template _Rehash_base.
1212 *
1213 * Give hashtable the max_load_factor functions and reserve iff the
1214 * rehash policy supports it.
1215 */
1216 template<typename _Key, typename _Value, typename _Alloc,
1217 typename _ExtractKey, typename _Equal,
1218 typename _Hash, typename _RangeHash, typename _Unused,
1219 typename _RehashPolicy, typename _Traits,
1220 typename =
1221 __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
1222 struct _Rehash_base;
1223
1224 /// Specialization when rehash policy doesn't provide load factor management.
1225 template<typename _Key, typename _Value, typename _Alloc,
1226 typename _ExtractKey, typename _Equal,
1227 typename _Hash, typename _RangeHash, typename _Unused,
1228 typename _RehashPolicy, typename _Traits>
1229 struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1230 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1231 false_type /* Has load factor */>
1232 {
1233 };
1234
1235 /// Specialization when rehash policy provide load factor management.
1236 template<typename _Key, typename _Value, typename _Alloc,
1237 typename _ExtractKey, typename _Equal,
1238 typename _Hash, typename _RangeHash, typename _Unused,
1239 typename _RehashPolicy, typename _Traits>
1240 struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1241 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1242 true_type /* Has load factor */>
1243 {
1244 private:
1245 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
1246 _Equal, _Hash, _RangeHash, _Unused,
1247 _RehashPolicy, _Traits>;
1248
1249 public:
1250 float
1251 max_load_factor() const noexcept
1252 {
1253 const __hashtable* __this = static_cast<const __hashtable*>(this);
1254 return __this->__rehash_policy().max_load_factor();
1255 }
1256
1257 void
1258 max_load_factor(float __z)
1259 {
1260 __hashtable* __this = static_cast<__hashtable*>(this);
1261 __this->__rehash_policy(_RehashPolicy(__z));
1262 }
1263
1264 void
1265 reserve(std::size_t __n)
1266 {
1267 __hashtable* __this = static_cast<__hashtable*>(this);
1268 __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
1269 }
1270 };
1271
1272 /**
1273 * Primary class template _Hashtable_ebo_helper.
1274 *
1275 * Helper class using EBO when it is not forbidden (the type is not
1276 * final) and when it is worth it (the type is empty.)
1277 */
1278 template<int _Nm, typename _Tp,
1279 bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
1280 struct _Hashtable_ebo_helper;
1281
1282 /// Specialization using EBO.
1283 template<int _Nm, typename _Tp>
1284 struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1285 : private _Tp
1286 {
1287 _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }
1288
1289 template<typename _OtherTp>
1290 _Hashtable_ebo_helper(_OtherTp&& __tp)
1291 : _Tp(std::forward<_OtherTp>(__tp))
1292 { }
1293
1294 const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1295 _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1296 };
1297
1298 /// Specialization not using EBO.
1299 template<int _Nm, typename _Tp>
1300 struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1301 {
1302 _Hashtable_ebo_helper() = default;
1303
1304 template<typename _OtherTp>
1305 _Hashtable_ebo_helper(_OtherTp&& __tp)
1306 : _M_tp(std::forward<_OtherTp>(__tp))
1307 { }
1308
1309 const _Tp& _M_cget() const { return _M_tp; }
1310 _Tp& _M_get() { return _M_tp; }
1311
1312 private:
1313 _Tp _M_tp{};
1314 };
1315
1316 /**
1317 * Primary class template _Local_iterator_base.
1318 *
1319 * Base class for local iterators, used to iterate within a bucket
1320 * but not between buckets.
1321 */
1322 template<typename _Key, typename _Value, typename _ExtractKey,
1323 typename _Hash, typename _RangeHash, typename _Unused,
1324 bool __cache_hash_code>
1325 struct _Local_iterator_base;
1326
1327 /**
1328 * Primary class template _Hash_code_base.
1329 *
1330 * Encapsulates two policy issues that aren't quite orthogonal.
1331 * (1) the difference between using a ranged hash function and using
1332 * the combination of a hash function and a range-hashing function.
1333 * In the former case we don't have such things as hash codes, so
1334 * we have a dummy type as placeholder.
1335 * (2) Whether or not we cache hash codes. Caching hash codes is
1336 * meaningless if we have a ranged hash function.
1337 *
1338 * We also put the key extraction objects here, for convenience.
1339 * Each specialization derives from one or more of the template
1340 * parameters to benefit from Ebo. This is important as this type
1341 * is inherited in some cases by the _Local_iterator_base type used
1342 * to implement local_iterator and const_local_iterator. As with
1343 * any iterator type we prefer to make it as small as possible.
1344 */
1345 template<typename _Key, typename _Value, typename _ExtractKey,
1346 typename _Hash, typename _RangeHash, typename _Unused,
1347 bool __cache_hash_code>
1348 struct _Hash_code_base
1349 : private _Hashtable_ebo_helper<1, _Hash>
1350 {
1351 private:
1352 using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;
1353
1354 // Gives the local iterator implementation access to _M_bucket_index().
1355 friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1356 _Hash, _RangeHash, _Unused, false>;
1357
1358 public:
1359 typedef _Hash hasher;
1360
1361 hasher
1362 hash_function() const
1363 { return _M_hash(); }
1364
1365 protected:
1366 typedef std::size_t __hash_code;
1367
1368 // We need the default constructor for the local iterators and _Hashtable
1369 // default constructor.
1370 _Hash_code_base() = default;
1371
1372 _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }
1373
1374 __hash_code
1375 _M_hash_code(const _Key& __k) const
1376 {
1377 static_assert(__is_invocable<const _Hash&, const _Key&>{},
1378 "hash function must be invocable with an argument of key type");
1379 return _M_hash()(__k);
1380 }
1381
1382 template<typename _Kt>
1383 __hash_code
1384 _M_hash_code_tr(const _Kt& __k) const
1385 {
1386 static_assert(__is_invocable<const _Hash&, const _Kt&>{},
1387 "hash function must be invocable with an argument of key type");
1388 return _M_hash()(__k);
1389 }
1390
1391 __hash_code
1392 _M_hash_code(const _Hash_node_value<_Value, false>& __n) const
1393 { return _M_hash_code(_ExtractKey{}(__n._M_v())); }
1394
1395 __hash_code
1396 _M_hash_code(const _Hash_node_value<_Value, true>& __n) const
1397 { return __n._M_hash_code; }
1398
1399 std::size_t
1400 _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
1401 { return _RangeHash{}(__c, __bkt_count); }
1402
1403 std::size_t
1404 _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
1405 std::size_t __bkt_count) const
1406 noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
1407 && noexcept(declval<const _RangeHash&>()((__hash_code)0,
1408 (std::size_t)0)) )
1409 {
1410 return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
1411 __bkt_count);
1412 }
1413
1414 std::size_t
1415 _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
1416 std::size_t __bkt_count) const
1417 noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
1418 (std::size_t)0)) )
1419 { return _RangeHash{}(__n._M_hash_code, __bkt_count); }
1420
1421 void
1422 _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
1423 { }
1424
1425 void
1426 _M_copy_code(_Hash_node_code_cache<false>&,
1427 const _Hash_node_code_cache<false>&) const
1428 { }
1429
1430 void
1431 _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
1432 { __n._M_hash_code = __c; }
1433
1434 void
1435 _M_copy_code(_Hash_node_code_cache<true>& __to,
1436 const _Hash_node_code_cache<true>& __from) const
1437 { __to._M_hash_code = __from._M_hash_code; }
1438
1439 void
1440 _M_swap(_Hash_code_base& __x)
1441 {
1442 using std::swap;
1443 swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get());
1444 }
1445
1446 const _Hash&
1447 _M_hash() const { return __ebo_hash::_M_cget(); }
1448 };
1449
1450 /// Partial specialization used when nodes contain a cached hash code.
1451 template<typename _Key, typename _Value, typename _ExtractKey,
1452 typename _Hash, typename _RangeHash, typename _Unused>
1453 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1454 _Hash, _RangeHash, _Unused, true>
1455 : public _Node_iterator_base<_Value, true>
1456 {
1457 protected:
1458 using __base_node_iter = _Node_iterator_base<_Value, true>;
1459 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1460 _Hash, _RangeHash, _Unused, true>;
1461
1462 _Local_iterator_base() = default;
1463 _Local_iterator_base(const __hash_code_base&,
1464 _Hash_node<_Value, true>* __p,
1465 std::size_t __bkt, std::size_t __bkt_count)
1466 : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1467 { }
1468
1469 void
1470 _M_incr()
1471 {
1472 __base_node_iter::_M_incr();
1473 if (this->_M_cur)
1474 {
1475 std::size_t __bkt
1476 = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
1477 if (__bkt != _M_bucket)
1478 this->_M_cur = nullptr;
1479 }
1480 }
1481
1482 std::size_t _M_bucket;
1483 std::size_t _M_bucket_count;
1484
1485 public:
1486 std::size_t
1487 _M_get_bucket() const { return _M_bucket; } // for debug mode
1488 };
1489
1490 // Uninitialized storage for a _Hash_code_base.
1491 // This type is DefaultConstructible and Assignable even if the
1492 // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
1493 // can be DefaultConstructible and Assignable.
1494 template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
1495 struct _Hash_code_storage
1496 {
1497 __gnu_cxx::__aligned_buffer<_Tp> _M_storage;
1498
1499 _Tp*
1500 _M_h() { return _M_storage._M_ptr(); }
1501
1502 const _Tp*
1503 _M_h() const { return _M_storage._M_ptr(); }
1504 };
1505
1506 // Empty partial specialization for empty _Hash_code_base types.
1507 template<typename _Tp>
1508 struct _Hash_code_storage<_Tp, true>
1509 {
1510 static_assert( std::is_empty<_Tp>::value, "Type must be empty" );
1511
1512 // As _Tp is an empty type there will be no bytes written/read through
1513 // the cast pointer, so no strict-aliasing violation.
1514 _Tp*
1515 _M_h() { return reinterpret_cast<_Tp*>(this); }
1516
1517 const _Tp*
1518 _M_h() const { return reinterpret_cast<const _Tp*>(this); }
1519 };
1520
1521 template<typename _Key, typename _Value, typename _ExtractKey,
1522 typename _Hash, typename _RangeHash, typename _Unused>
1523 using __hash_code_for_local_iter
1524 = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
1525 _Hash, _RangeHash, _Unused, false>>;
1526
1527 // Partial specialization used when hash codes are not cached
1528 template<typename _Key, typename _Value, typename _ExtractKey,
1529 typename _Hash, typename _RangeHash, typename _Unused>
1530 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1531 _Hash, _RangeHash, _Unused, false>
1532 : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1533 _Unused>
1534 , _Node_iterator_base<_Value, false>
1535 {
1536 protected:
1537 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1538 _Hash, _RangeHash, _Unused, false>;
1539 using __node_iter_base = _Node_iterator_base<_Value, false>;
1540
1541 _Local_iterator_base() : _M_bucket_count(-1) { }
1542
1543 _Local_iterator_base(const __hash_code_base& __base,
1544 _Hash_node<_Value, false>* __p,
1545 std::size_t __bkt, std::size_t __bkt_count)
1546 : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1547 { _M_init(__base); }
1548
1549 ~_Local_iterator_base()
1550 {
1551 if (_M_bucket_count != size_t(-1))
1552 _M_destroy();
1553 }
1554
1555 _Local_iterator_base(const _Local_iterator_base& __iter)
1556 : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1557 , _M_bucket_count(__iter._M_bucket_count)
1558 {
1559 if (_M_bucket_count != size_t(-1))
1560 _M_init(*__iter._M_h());
1561 }
1562
1563 _Local_iterator_base&
1564 operator=(const _Local_iterator_base& __iter)
1565 {
1566 if (_M_bucket_count != -1)
1567 _M_destroy();
1568 this->_M_cur = __iter._M_cur;
1569 _M_bucket = __iter._M_bucket;
1570 _M_bucket_count = __iter._M_bucket_count;
1571 if (_M_bucket_count != -1)
1572 _M_init(*__iter._M_h());
1573 return *this;
1574 }
1575
1576 void
1577 _M_incr()
1578 {
1579 __node_iter_base::_M_incr();
1580 if (this->_M_cur)
1581 {
1582 std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1583 _M_bucket_count);
1584 if (__bkt != _M_bucket)
1585 this->_M_cur = nullptr;
1586 }
1587 }
1588
1589 std::size_t _M_bucket;
1590 std::size_t _M_bucket_count;
1591
1592 void
1593 _M_init(const __hash_code_base& __base)
1594 { ::new(this->_M_h()) __hash_code_base(__base); }
1595
1596 void
1597 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1598
1599 public:
1600 std::size_t
1601 _M_get_bucket() const { return _M_bucket; } // for debug mode
1602 };
1603
1604 /// local iterators
1605 template<typename _Key, typename _Value, typename _ExtractKey,
1606 typename _Hash, typename _RangeHash, typename _Unused,
1607 bool __constant_iterators, bool __cache>
1608 struct _Local_iterator
1609 : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1610 _Hash, _RangeHash, _Unused, __cache>
1611 {
1612 private:
1613 using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1614 _Hash, _RangeHash, _Unused, __cache>;
1615 using __hash_code_base = typename __base_type::__hash_code_base;
1616
1617 public:
1618 using value_type = _Value;
1619 using pointer = __conditional_t<__constant_iterators,
1620 const value_type*, value_type*>;
1621 using reference = __conditional_t<__constant_iterators,
1622 const value_type&, value_type&>;
1623 using difference_type = ptrdiff_t;
1624 using iterator_category = forward_iterator_tag;
1625
1626 _Local_iterator() = default;
1627
1628 _Local_iterator(const __hash_code_base& __base,
1629 _Hash_node<_Value, __cache>* __n,
1630 std::size_t __bkt, std::size_t __bkt_count)
1631 : __base_type(__base, __n, __bkt, __bkt_count)
1632 { }
1633
1634 reference
1635 operator*() const
1636 { return this->_M_cur->_M_v(); }
1637
1638 pointer
1639 operator->() const
1640 { return this->_M_cur->_M_valptr(); }
1641
1642 _Local_iterator&
1643 operator++()
1644 {
1645 this->_M_incr();
1646 return *this;
1647 }
1648
1649 _Local_iterator
1650 operator++(int)
1651 {
1652 _Local_iterator __tmp(*this);
1653 this->_M_incr();
1654 return __tmp;
1655 }
1656 };
1657
1658 /// local const_iterators
1659 template<typename _Key, typename _Value, typename _ExtractKey,
1660 typename _Hash, typename _RangeHash, typename _Unused,
1661 bool __constant_iterators, bool __cache>
1662 struct _Local_const_iterator
1663 : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1664 _Hash, _RangeHash, _Unused, __cache>
1665 {
1666 private:
1667 using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1668 _Hash, _RangeHash, _Unused, __cache>;
1669 using __hash_code_base = typename __base_type::__hash_code_base;
1670
1671 public:
1672 typedef _Value value_type;
1673 typedef const value_type* pointer;
1674 typedef const value_type& reference;
1675 typedef std::ptrdiff_t difference_type;
1676 typedef std::forward_iterator_tag iterator_category;
1677
1678 _Local_const_iterator() = default;
1679
1680 _Local_const_iterator(const __hash_code_base& __base,
1681 _Hash_node<_Value, __cache>* __n,
1682 std::size_t __bkt, std::size_t __bkt_count)
1683 : __base_type(__base, __n, __bkt, __bkt_count)
1684 { }
1685
1686 _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
1687 _Hash, _RangeHash, _Unused,
1688 __constant_iterators,
1689 __cache>& __x)
1690 : __base_type(__x)
1691 { }
1692
1693 reference
1694 operator*() const
1695 { return this->_M_cur->_M_v(); }
1696
1697 pointer
1698 operator->() const
1699 { return this->_M_cur->_M_valptr(); }
1700
1701 _Local_const_iterator&
1702 operator++()
1703 {
1704 this->_M_incr();
1705 return *this;
1706 }
1707
1708 _Local_const_iterator
1709 operator++(int)
1710 {
1711 _Local_const_iterator __tmp(*this);
1712 this->_M_incr();
1713 return __tmp;
1714 }
1715 };
1716
1717 /**
1718 * Primary class template _Hashtable_base.
1719 *
1720 * Helper class adding management of _Equal functor to
1721 * _Hash_code_base type.
1722 *
1723 * Base class templates are:
1724 * - __detail::_Hash_code_base
1725 * - __detail::_Hashtable_ebo_helper
1726 */
1727 template<typename _Key, typename _Value, typename _ExtractKey,
1728 typename _Equal, typename _Hash, typename _RangeHash,
1729 typename _Unused, typename _Traits>
1730 struct _Hashtable_base
1731 : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1732 _Unused, _Traits::__hash_cached::value>,
1733 private _Hashtable_ebo_helper<0, _Equal>
1734 {
1735 public:
1736 typedef _Key key_type;
1737 typedef _Value value_type;
1738 typedef _Equal key_equal;
1739 typedef std::size_t size_type;
1740 typedef std::ptrdiff_t difference_type;
1741
1742 using __traits_type = _Traits;
1743 using __hash_cached = typename __traits_type::__hash_cached;
1744
1745 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1746 _Hash, _RangeHash, _Unused,
1747 __hash_cached::value>;
1748
1749 using __hash_code = typename __hash_code_base::__hash_code;
1750
1751 private:
1752 using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;
1753
1754 static bool
1755 _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
1756 { return true; }
1757
1758 static bool
1759 _S_node_equals(const _Hash_node_code_cache<false>&,
1760 const _Hash_node_code_cache<false>&)
1761 { return true; }
1762
1763 static bool
1764 _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
1765 { return __c == __n._M_hash_code; }
1766
1767 static bool
1768 _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
1769 const _Hash_node_code_cache<true>& __rhn)
1770 { return __lhn._M_hash_code == __rhn._M_hash_code; }
1771
1772 protected:
1773 _Hashtable_base() = default;
1774
1775 _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
1776 : __hash_code_base(__hash), _EqualEBO(__eq)
1777 { }
1778
1779 bool
1780 _M_key_equals(const _Key& __k,
1781 const _Hash_node_value<_Value,
1782 __hash_cached::value>& __n) const
1783 {
1784 static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
1785 "key equality predicate must be invocable with two arguments of "
1786 "key type");
1787 return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1788 }
1789
1790 template<typename _Kt>
1791 bool
1792 _M_key_equals_tr(const _Kt& __k,
1793 const _Hash_node_value<_Value,
1794 __hash_cached::value>& __n) const
1795 {
1796 static_assert(
1797 __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
1798 "key equality predicate must be invocable with two arguments of "
1799 "key type");
1800 return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1801 }
1802
1803 bool
1804 _M_equals(const _Key& __k, __hash_code __c,
1805 const _Hash_node_value<_Value, __hash_cached::value>& __n) const
1806 { return _S_equals(__c, __n) && _M_key_equals(__k, __n); }
1807
1808 template<typename _Kt>
1809 bool
1810 _M_equals_tr(const _Kt& __k, __hash_code __c,
1811 const _Hash_node_value<_Value,
1812 __hash_cached::value>& __n) const
1813 { return _S_equals(__c, __n) && _M_key_equals_tr(__k, __n); }
1814
1815 bool
1816 _M_node_equals(
1817 const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
1818 const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
1819 {
1820 return _S_node_equals(__lhn, __rhn)
1821 && _M_key_equals(_ExtractKey{}(__lhn._M_v()), __rhn);
1822 }
1823
1824 void
1825 _M_swap(_Hashtable_base& __x)
1826 {
1827 __hash_code_base::_M_swap(__x);
1828 using std::swap;
1829 swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
1830 }
1831
1832 const _Equal&
1833 _M_eq() const { return _EqualEBO::_M_cget(); }
1834 };
1835
1836 /**
1837 * Primary class template _Equality.
1838 *
1839 * This is for implementing equality comparison for unordered
1840 * containers, per N3068, by John Lakos and Pablo Halpern.
1841 * Algorithmically, we follow closely the reference implementations
1842 * therein.
1843 */
1844 template<typename _Key, typename _Value, typename _Alloc,
1845 typename _ExtractKey, typename _Equal,
1846 typename _Hash, typename _RangeHash, typename _Unused,
1847 typename _RehashPolicy, typename _Traits,
1848 bool _Unique_keys = _Traits::__unique_keys::value>
1849 struct _Equality;
1850
1851 /// unordered_map and unordered_set specializations.
1852 template<typename _Key, typename _Value, typename _Alloc,
1853 typename _ExtractKey, typename _Equal,
1854 typename _Hash, typename _RangeHash, typename _Unused,
1855 typename _RehashPolicy, typename _Traits>
1856 struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1857 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
1858 {
1859 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1860 _Hash, _RangeHash, _Unused,
1861 _RehashPolicy, _Traits>;
1862
1863 bool
1864 _M_equal(const __hashtable&) const;
1865 };
1866
1867 template<typename _Key, typename _Value, typename _Alloc,
1868 typename _ExtractKey, typename _Equal,
1869 typename _Hash, typename _RangeHash, typename _Unused,
1870 typename _RehashPolicy, typename _Traits>
1871 bool
1872 _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1873 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
1874 _M_equal(const __hashtable& __other) const
1875 {
1876 using __node_ptr = typename __hashtable::__node_ptr;
1877 const __hashtable* __this = static_cast<const __hashtable*>(this);
1878 if (__this->size() != __other.size())
1879 return false;
1880
1881 for (auto __x_n = __this->_M_begin(); __x_n; __x_n = __x_n->_M_next())
1882 {
1883 std::size_t __ybkt = __other._M_bucket_index(*__x_n);
1884 auto __prev_n = __other._M_buckets[__ybkt];
1885 if (!__prev_n)
1886 return false;
1887
1888 for (__node_ptr __n = static_cast<__node_ptr>(__prev_n->_M_nxt);;
1889 __n = __n->_M_next())
1890 {
1891 if (__n->_M_v() == __x_n->_M_v())
1892 break;
1893
1894 if (!__n->_M_nxt
1895 || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
1896 return false;
1897 }
1898 }
1899
1900 return true;
1901 }
1902
1903 /// unordered_multiset and unordered_multimap specializations.
1904 template<typename _Key, typename _Value, typename _Alloc,
1905 typename _ExtractKey, typename _Equal,
1906 typename _Hash, typename _RangeHash, typename _Unused,
1907 typename _RehashPolicy, typename _Traits>
1908 struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1909 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1910 {
1911 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1912 _Hash, _RangeHash, _Unused,
1913 _RehashPolicy, _Traits>;
1914
1915 bool
1916 _M_equal(const __hashtable&) const;
1917 };
1918
1919 template<typename _Key, typename _Value, typename _Alloc,
1920 typename _ExtractKey, typename _Equal,
1921 typename _Hash, typename _RangeHash, typename _Unused,
1922 typename _RehashPolicy, typename _Traits>
1923 bool
1924 _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1925 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
1926 _M_equal(const __hashtable& __other) const
1927 {
1928 using __node_ptr = typename __hashtable::__node_ptr;
1929 using const_iterator = typename __hashtable::const_iterator;
1930 const __hashtable* __this = static_cast<const __hashtable*>(this);
1931 if (__this->size() != __other.size())
1932 return false;
1933
1934 for (auto __x_n = __this->_M_begin(); __x_n;)
1935 {
1936 std::size_t __x_count = 1;
1937 auto __x_n_end = __x_n->_M_next();
1938 for (; __x_n_end
1939 && __this->key_eq()(_ExtractKey{}(__x_n->_M_v()),
1940 _ExtractKey{}(__x_n_end->_M_v()));
1941 __x_n_end = __x_n_end->_M_next())
1942 ++__x_count;
1943
1944 std::size_t __ybkt = __other._M_bucket_index(*__x_n);
1945 auto __y_prev_n = __other._M_buckets[__ybkt];
1946 if (!__y_prev_n)
1947 return false;
1948
1949 __node_ptr __y_n = static_cast<__node_ptr>(__y_prev_n->_M_nxt);
1950 for (;;)
1951 {
1952 if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
1953 _ExtractKey{}(__x_n->_M_v())))
1954 break;
1955
1956 auto __y_ref_n = __y_n;
1957 for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
1958 if (!__other._M_node_equals(*__y_ref_n, *__y_n))
1959 break;
1960
1961 if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
1962 return false;
1963 }
1964
1965 auto __y_n_end = __y_n;
1966 for (; __y_n_end; __y_n_end = __y_n_end->_M_next())
1967 if (--__x_count == 0)
1968 break;
1969
1970 if (__x_count != 0)
1971 return false;
1972
1973 const_iterator __itx(__x_n), __itx_end(__x_n_end);
1974 const_iterator __ity(__y_n);
1975 if (!std::is_permutation(__itx, __itx_end, __ity))
1976 return false;
1977
1978 __x_n = __x_n_end;
1979 }
1980 return true;
1981 }
1982
1983 /**
1984 * This type deals with all allocation and keeps an allocator instance
1985 * through inheritance to benefit from EBO when possible.
1986 */
1987 template<typename _NodeAlloc>
1988 struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
1989 {
1990 private:
1991 using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
1992
1993 template<typename>
1994 struct __get_value_type;
1995 template<typename _Val, bool _Cache_hash_code>
1996 struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
1997 { using type = _Val; };
1998
1999 public:
2000 using __node_type = typename _NodeAlloc::value_type;
2001 using __node_alloc_type = _NodeAlloc;
2002 // Use __gnu_cxx to benefit from _S_always_equal and al.
2003 using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
2004
2005 using __value_alloc_traits = typename __node_alloc_traits::template
2006 rebind_traits<typename __get_value_type<__node_type>::type>;
2007
2008 using __node_ptr = __node_type*;
2009 using __node_base = _Hash_node_base;
2010 using __node_base_ptr = __node_base*;
2011 using __buckets_alloc_type =
2012 __alloc_rebind<__node_alloc_type, __node_base_ptr>;
2013 using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
2014 using __buckets_ptr = __node_base_ptr*;
2015
2016 _Hashtable_alloc() = default;
2017 _Hashtable_alloc(const _Hashtable_alloc&) = default;
2018 _Hashtable_alloc(_Hashtable_alloc&&) = default;
2019
2020 template<typename _Alloc>
2021 _Hashtable_alloc(_Alloc&& __a)
2022 : __ebo_node_alloc(std::forward<_Alloc>(__a))
2023 { }
2024
2025 __node_alloc_type&
2026 _M_node_allocator()
2027 { return __ebo_node_alloc::_M_get(); }
2028
2029 const __node_alloc_type&
2030 _M_node_allocator() const
2031 { return __ebo_node_alloc::_M_cget(); }
2032
2033 // Allocate a node and construct an element within it.
2034 template<typename... _Args>
2035 __node_ptr
2036 _M_allocate_node(_Args&&... __args);
2037
2038 // Destroy the element within a node and deallocate the node.
2039 void
2040 _M_deallocate_node(__node_ptr __n);
2041
2042 // Deallocate a node.
2043 void
2044 _M_deallocate_node_ptr(__node_ptr __n);
2045
2046 // Deallocate the linked list of nodes pointed to by __n.
2047 // The elements within the nodes are destroyed.
2048 void
2049 _M_deallocate_nodes(__node_ptr __n);
2050
2051 __buckets_ptr
2052 _M_allocate_buckets(std::size_t __bkt_count);
2053
2054 void
2055 _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
2056 };
2057
2058 // Definitions of class template _Hashtable_alloc's out-of-line member
2059 // functions.
2060 template<typename _NodeAlloc>
2061 template<typename... _Args>
2062 auto
2063 _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
2064 -> __node_ptr
2065 {
2066 auto& __alloc = _M_node_allocator();
2067 auto __nptr = __node_alloc_traits::allocate(__alloc, 1);
2068 __node_ptr __n = std::__to_address(__nptr);
2069 __try
2070 {
2071 ::new ((void*)__n) __node_type;
2072 __node_alloc_traits::construct(__alloc, __n->_M_valptr(),
2073 std::forward<_Args>(__args)...);
2074 return __n;
2075 }
2076 __catch(...)
2077 {
2078 __n->~__node_type();
2079 __node_alloc_traits::deallocate(__alloc, __nptr, 1);
2080 __throw_exception_again;
2081 }
2082 }
2083
2084 template<typename _NodeAlloc>
2085 void
2086 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
2087 {
2088 __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
2089 _M_deallocate_node_ptr(__n);
2090 }
2091
2092 template<typename _NodeAlloc>
2093 void
2094 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
2095 {
2096 typedef typename __node_alloc_traits::pointer _Ptr;
2097 auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
2098 __n->~__node_type();
2099 __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
2100 }
2101
2102 template<typename _NodeAlloc>
2103 void
2104 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
2105 {
2106 while (__n)
2107 {
2108 __node_ptr __tmp = __n;
2109 __n = __n->_M_next();
2110 _M_deallocate_node(__tmp);
2111 }
2112 }
2113
2114 template<typename _NodeAlloc>
2115 auto
2116 _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
2117 -> __buckets_ptr
2118 {
2119 __buckets_alloc_type __alloc(_M_node_allocator());
2120
2121 auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
2122 __buckets_ptr __p = std::__to_address(__ptr);
2123 __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
2124 return __p;
2125 }
2126
2127 template<typename _NodeAlloc>
2128 void
2129 _Hashtable_alloc<_NodeAlloc>::
2130 _M_deallocate_buckets(__buckets_ptr __bkts,
2131 std::size_t __bkt_count)
2132 {
2133 typedef typename __buckets_alloc_traits::pointer _Ptr;
2134 auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
2135 __buckets_alloc_type __alloc(_M_node_allocator());
2136 __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
2137 }
2138
2139 ///@} hashtable-detail
2140} // namespace __detail
2141/// @endcond
2142_GLIBCXX_END_NAMESPACE_VERSION
2143} // namespace std
2144
2145#endif // _HASHTABLE_POLICY_H
constexpr complex< _Tp > operator*(const complex< _Tp > &__x, const complex< _Tp > &__y)
Return new complex value x times y.
Definition complex:400
__bool_constant< true > true_type
The type used as a compile-time boolean with true value.
Definition type_traits:111
__bool_constant< false > false_type
The type used as a compile-time boolean with false value.
Definition type_traits:114
constexpr tuple< _Elements &&... > forward_as_tuple(_Elements &&... __args) noexcept
Create a tuple of lvalue or rvalue references to the arguments.
Definition tuple:2654
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
Definition move.h:137
constexpr piecewise_construct_t piecewise_construct
Tag for piecewise construction of std::pair objects.
Definition stl_pair.h:82
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof.
Definition move.h:51
constexpr _Tp && forward(typename std::remove_reference< _Tp >::type &__t) noexcept
Forward an lvalue.
Definition move.h:71
constexpr iterator_traits< _Iter >::iterator_category __iterator_category(const _Iter &)
ISO C++ entities toplevel namespace is std.
constexpr iterator_traits< _InputIterator >::difference_type distance(_InputIterator __first, _InputIterator __last)
A generalization of pointer arithmetic.
__numeric_traits_integer< _Tp > __int_traits
Convenience alias for __numeric_traits<integer-type>.
constexpr _Iterator __base(_Iterator __it)
Primary class template, tuple.
Definition tuple:837
is_empty
Definition type_traits:915
Traits class for iterators.
Uniform interface to all pointer-like types.
Definition ptr_traits.h:178
Marking input iterators.
Forward iterators support a superset of input iterator operations.
Uniform interface to C++98 and C++11 allocators.