hashtable_policy.h
1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
2 
3 // Copyright (C) 2010-2021 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file bits/hashtable_policy.h
26  * This is an internal header file, included by other library headers.
27  * Do not attempt to use it directly.
28  * @headername{unordered_map,unordered_set}
29  */
30 
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
33 
34 #include <tuple> // for std::tuple, std::forward_as_tuple
35 #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36 #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
37 
38 namespace std _GLIBCXX_VISIBILITY(default)
39 {
40 _GLIBCXX_BEGIN_NAMESPACE_VERSION
41 
42  template<typename _Key, typename _Value, typename _Alloc,
43  typename _ExtractKey, typename _Equal,
44  typename _Hash, typename _RangeHash, typename _Unused,
45  typename _RehashPolicy, typename _Traits>
46  class _Hashtable;
47 
48 namespace __detail
49 {
50  /**
51  * @defgroup hashtable-detail Base and Implementation Classes
52  * @ingroup unordered_associative_containers
53  * @{
54  */
55  template<typename _Key, typename _Value, typename _ExtractKey,
56  typename _Equal, typename _Hash, typename _RangeHash,
57  typename _Unused, typename _Traits>
58  struct _Hashtable_base;
59 
60  // Helper function: return distance(first, last) for forward
61  // iterators, or 0/1 for input iterators.
62  template<class _Iterator>
63  inline typename std::iterator_traits<_Iterator>::difference_type
64  __distance_fw(_Iterator __first, _Iterator __last,
65  std::input_iterator_tag)
66  { return __first != __last ? 1 : 0; }
67 
68  template<class _Iterator>
69  inline typename std::iterator_traits<_Iterator>::difference_type
70  __distance_fw(_Iterator __first, _Iterator __last,
71  std::forward_iterator_tag)
72  { return std::distance(__first, __last); }
73 
74  template<class _Iterator>
75  inline typename std::iterator_traits<_Iterator>::difference_type
76  __distance_fw(_Iterator __first, _Iterator __last)
77  { return __distance_fw(__first, __last,
78  std::__iterator_category(__first)); }
79 
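The three overloads above are a classic iterator-category tag dispatch: multi-pass forward iterators yield a real element count that range insertion can use to pre-size the table, while single-pass input iterators only report 0 or 1. A minimal stand-alone sketch of the same pattern; `my_distance_fw` and the demo are illustrative, not part of the library:

```cpp
#include <cassert>
#include <iterator>
#include <sstream>
#include <vector>

template<class It>
typename std::iterator_traits<It>::difference_type
my_distance_fw(It first, It last, std::input_iterator_tag)
{ return first != last ? 1 : 0; }                  // single-pass: only "empty or not"

template<class It>
typename std::iterator_traits<It>::difference_type
my_distance_fw(It first, It last, std::forward_iterator_tag)
{ return std::distance(first, last); }             // multi-pass: a real count is affordable

template<class It>
typename std::iterator_traits<It>::difference_type
my_distance_fw(It first, It last)
{ return my_distance_fw(first, last,
                        typename std::iterator_traits<It>::iterator_category{}); }

int main()
{
  std::vector<int> v{1, 2, 3};
  assert(my_distance_fw(v.begin(), v.end()) == 3); // forward (random access) iterators

  std::istringstream in("1 2 3");
  std::istream_iterator<int> it(in), end;
  assert(my_distance_fw(it, end) == 1);            // input iterators: 0 or 1 only
}
```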
80  struct _Identity
81  {
82  template<typename _Tp>
83  _Tp&&
84  operator()(_Tp&& __x) const noexcept
85  { return std::forward<_Tp>(__x); }
86  };
87 
88  struct _Select1st
89  {
90  template<typename _Tp>
91  auto
92  operator()(_Tp&& __x) const noexcept
93  -> decltype(std::get<0>(std::forward<_Tp>(__x)))
94  { return std::get<0>(std::forward<_Tp>(__x)); }
95  };
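`_Identity` and `_Select1st` are the `_ExtractKey` policies: a set element is its own key, while a map element is a pair whose first member is the key. A small sketch with stand-in functors (`identity` and `select1st` are illustrative copies; the real functors live in `std::__detail`):

```cpp
#include <cassert>
#include <string>
#include <utility>

struct identity
{
  template<typename T>
  T&& operator()(T&& x) const noexcept { return std::forward<T>(x); }
};

struct select1st
{
  template<typename T>
  auto operator()(T&& x) const noexcept -> decltype(std::get<0>(std::forward<T>(x)))
  { return std::get<0>(std::forward<T>(x)); }
};

int main()
{
  int elem = 42;                                       // a set element is its own key
  assert(identity{}(elem) == 42);

  std::pair<const std::string, int> kv{"answer", 42};  // a map element is a pair
  assert(select1st{}(kv) == "answer");                 // the key is the first member
}
```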
96 
97  template<typename _NodeAlloc>
98  struct _Hashtable_alloc;
99 
100  // Functor recycling a pool of nodes and using allocation once the pool is
101  // empty.
102  template<typename _NodeAlloc>
103  struct _ReuseOrAllocNode
104  {
105  private:
106  using __node_alloc_type = _NodeAlloc;
107  using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
108  using __node_alloc_traits =
109  typename __hashtable_alloc::__node_alloc_traits;
110  using __node_type = typename __hashtable_alloc::__node_type;
111 
112  public:
113  _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
114  : _M_nodes(__nodes), _M_h(__h) { }
115  _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;
116 
117  ~_ReuseOrAllocNode()
118  { _M_h._M_deallocate_nodes(_M_nodes); }
119 
120  template<typename _Arg>
121  __node_type*
122  operator()(_Arg&& __arg) const
123  {
124  if (_M_nodes)
125  {
126  __node_type* __node = _M_nodes;
127  _M_nodes = _M_nodes->_M_next();
128  __node->_M_nxt = nullptr;
129  auto& __a = _M_h._M_node_allocator();
130  __node_alloc_traits::destroy(__a, __node->_M_valptr());
131  __try
132  {
133  __node_alloc_traits::construct(__a, __node->_M_valptr(),
134  std::forward<_Arg>(__arg));
135  }
136  __catch(...)
137  {
138  _M_h._M_deallocate_node_ptr(__node);
139  __throw_exception_again;
140  }
141  return __node;
142  }
143  return _M_h._M_allocate_node(std::forward<_Arg>(__arg));
144  }
145 
146  private:
147  mutable __node_type* _M_nodes;
148  __hashtable_alloc& _M_h;
149  };
150 
151  // Functor similar to the previous one but without any pool of nodes to
152  // recycle.
153  template<typename _NodeAlloc>
154  struct _AllocNode
155  {
156  private:
157  using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
158  using __node_type = typename __hashtable_alloc::__node_type;
159 
160  public:
161  _AllocNode(__hashtable_alloc& __h)
162  : _M_h(__h) { }
163 
164  template<typename _Arg>
165  __node_type*
166  operator()(_Arg&& __arg) const
167  { return _M_h._M_allocate_node(std::forward<_Arg>(__arg)); }
168 
169  private:
170  __hashtable_alloc& _M_h;
171  };
172 
173  // Auxiliary types used for all instantiations of _Hashtable nodes
174  // and iterators.
175 
176  /**
177  * struct _Hashtable_traits
178  *
179  * Important traits for hash tables.
180  *
181  * @tparam _Cache_hash_code Boolean value. True if the value of
182  * the hash function is stored along with the value. This is a
183  * time-space tradeoff. Storing it may improve lookup speed by
184  * reducing the number of times we need to call the _Hash or _Equal
185  * functors.
186  *
187  * @tparam _Constant_iterators Boolean value. True if iterator and
188  * const_iterator are both constant iterator types. This is true
189  * for unordered_set and unordered_multiset, false for
190  * unordered_map and unordered_multimap.
191  *
192  * @tparam _Unique_keys Boolean value. True if the return value
193  * of _Hashtable::count(k) is always at most one, false if it may
194  * be an arbitrary number. This is true for unordered_set and
195  * unordered_map, false for unordered_multiset and
196  * unordered_multimap.
197  */
198  template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
199  struct _Hashtable_traits
200  {
201  using __hash_cached = __bool_constant<_Cache_hash_code>;
202  using __constant_iterators = __bool_constant<_Constant_iterators>;
203  using __unique_keys = __bool_constant<_Unique_keys>;
204  };
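The containers in `<unordered_map>` and `<unordered_set>` pick these traits when instantiating `_Hashtable`. One consequence of `_Constant_iterators` that is visible from user code is that set iterators are read-only, while map iterators keep only the key part const:

```cpp
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>

int main()
{
  // _Constant_iterators is true for unordered_set: dereferencing even the
  // mutable 'iterator' yields a const reference, because changing an element
  // would change its key and hence its bucket.
  static_assert(std::is_same<decltype(*std::declval<std::unordered_set<int>::iterator>()),
                             const int&>::value,
                "set iterators are constant");

  // For unordered_map only the key is const; the mapped value stays mutable.
  static_assert(std::is_same<decltype(*std::declval<std::unordered_map<int, int>::iterator>()),
                             std::pair<const int, int>&>::value,
                "map iterator references pair<const Key, T>");
}
```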
205 
206  /**
207  * struct _Hash_node_base
208  *
209  * Nodes, used to wrap elements stored in the hash table. A policy
210  * template parameter of class template _Hashtable controls whether
211  * nodes also store a hash code. In some cases (e.g. strings) this
212  * may be a performance win.
213  */
214  struct _Hash_node_base
215  {
216  _Hash_node_base* _M_nxt;
217 
218  _Hash_node_base() noexcept : _M_nxt() { }
219 
220  _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
221  };
222 
223  /**
224  * struct _Hash_node_value_base
225  *
226  * Node type with the value to store.
227  */
228  template<typename _Value>
229  struct _Hash_node_value_base
230  {
231  typedef _Value value_type;
232 
233  __gnu_cxx::__aligned_buffer<_Value> _M_storage;
234 
235  _Value*
236  _M_valptr() noexcept
237  { return _M_storage._M_ptr(); }
238 
239  const _Value*
240  _M_valptr() const noexcept
241  { return _M_storage._M_ptr(); }
242 
243  _Value&
244  _M_v() noexcept
245  { return *_M_valptr(); }
246 
247  const _Value&
248  _M_v() const noexcept
249  { return *_M_valptr(); }
250  };
251 
252  /**
253  * Primary template struct _Hash_node_code_cache.
254  */
255  template<bool _Cache_hash_code>
256  struct _Hash_node_code_cache
257  { };
258 
259  /**
260  * Specialization for node with cache, struct _Hash_node_code_cache.
261  */
262  template<>
263  struct _Hash_node_code_cache<true>
264  { std::size_t _M_hash_code; };
265 
266  template<typename _Value, bool _Cache_hash_code>
267  struct _Hash_node_value
268  : _Hash_node_value_base<_Value>
269  , _Hash_node_code_cache<_Cache_hash_code>
270  { };
271 
272  /**
273  * Primary template struct _Hash_node.
274  */
275  template<typename _Value, bool _Cache_hash_code>
276  struct _Hash_node
277  : _Hash_node_base
278  , _Hash_node_value<_Value, _Cache_hash_code>
279  {
280  _Hash_node*
281  _M_next() const noexcept
282  { return static_cast<_Hash_node*>(this->_M_nxt); }
283  };
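Putting the pieces together, a node is a singly linked list link plus storage for the value and, optionally, the cached hash code. A simplified, purely illustrative sketch of the cached layout (the real node keeps the value in an `__aligned_buffer` so the allocator can construct it in place, and the names below are hypothetical):

```cpp
#include <cstddef>

// Illustrative only: nodes chain through one pointer, and when _Cache_hash_code
// is true the element's hash is stored next to the value, so rehashing and
// equality pre-checks need not re-invoke the hash functor.
template<typename Value>
struct sketch_node_cached
{
  sketch_node_cached* next;   // corresponds to _Hash_node_base::_M_nxt
  Value               value;  // corresponds to _Hash_node_value_base::_M_storage
  std::size_t         hash;   // corresponds to _Hash_node_code_cache<true>::_M_hash_code
};
```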
284 
285  /// Base class for node iterators.
286  template<typename _Value, bool _Cache_hash_code>
287  struct _Node_iterator_base
288  {
289  using __node_type = _Hash_node<_Value, _Cache_hash_code>;
290 
291  __node_type* _M_cur;
292 
293  _Node_iterator_base() : _M_cur(nullptr) { }
294  _Node_iterator_base(__node_type* __p) noexcept
295  : _M_cur(__p) { }
296 
297  void
298  _M_incr() noexcept
299  { _M_cur = _M_cur->_M_next(); }
300 
301  friend bool
302  operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
303  noexcept
304  { return __x._M_cur == __y._M_cur; }
305 
306 #if __cpp_impl_three_way_comparison < 201907L
307  friend bool
308  operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
309  noexcept
310  { return __x._M_cur != __y._M_cur; }
311 #endif
312  };
313 
314  /// Node iterators, used to iterate through all the hashtable.
315  template<typename _Value, bool __constant_iterators, bool __cache>
316  struct _Node_iterator
317  : public _Node_iterator_base<_Value, __cache>
318  {
319  private:
320  using __base_type = _Node_iterator_base<_Value, __cache>;
321  using __node_type = typename __base_type::__node_type;
322 
323  public:
324  typedef _Value value_type;
325  typedef std::ptrdiff_t difference_type;
326  typedef std::forward_iterator_tag iterator_category;
327 
328  using pointer = typename std::conditional<__constant_iterators,
329  const value_type*, value_type*>::type;
330 
331  using reference = typename std::conditional<__constant_iterators,
332  const value_type&, value_type&>::type;
333 
334  _Node_iterator() = default;
335 
336  explicit
337  _Node_iterator(__node_type* __p) noexcept
338  : __base_type(__p) { }
339 
340  reference
341  operator*() const noexcept
342  { return this->_M_cur->_M_v(); }
343 
344  pointer
345  operator->() const noexcept
346  { return this->_M_cur->_M_valptr(); }
347 
348  _Node_iterator&
349  operator++() noexcept
350  {
351  this->_M_incr();
352  return *this;
353  }
354 
355  _Node_iterator
356  operator++(int) noexcept
357  {
358  _Node_iterator __tmp(*this);
359  this->_M_incr();
360  return __tmp;
361  }
362  };
363 
364  /// Node const_iterators, used to iterate through all the hashtable.
365  template<typename _Value, bool __constant_iterators, bool __cache>
366  struct _Node_const_iterator
367  : public _Node_iterator_base<_Value, __cache>
368  {
369  private:
370  using __base_type = _Node_iterator_base<_Value, __cache>;
371  using __node_type = typename __base_type::__node_type;
372 
373  public:
374  typedef _Value value_type;
375  typedef std::ptrdiff_t difference_type;
376  typedef std::forward_iterator_tag iterator_category;
377 
378  typedef const value_type* pointer;
379  typedef const value_type& reference;
380 
381  _Node_const_iterator() = default;
382 
383  explicit
384  _Node_const_iterator(__node_type* __p) noexcept
385  : __base_type(__p) { }
386 
387  _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
388  __cache>& __x) noexcept
389  : __base_type(__x._M_cur) { }
390 
391  reference
392  operator*() const noexcept
393  { return this->_M_cur->_M_v(); }
394 
395  pointer
396  operator->() const noexcept
397  { return this->_M_cur->_M_valptr(); }
398 
399  _Node_const_iterator&
400  operator++() noexcept
401  {
402  this->_M_incr();
403  return *this;
404  }
405 
406  _Node_const_iterator
407  operator++(int) noexcept
408  {
409  _Node_const_iterator __tmp(*this);
410  this->_M_incr();
411  return __tmp;
412  }
413  };
414 
415  // Many of class template _Hashtable's template parameters are policy
416  // classes. These are defaults for the policies.
417 
418  /// Default range hashing function: use division to fold a large number
419  /// into the range [0, N).
420  struct _Mod_range_hashing
421  {
422  typedef std::size_t first_argument_type;
423  typedef std::size_t second_argument_type;
424  typedef std::size_t result_type;
425 
426  result_type
427  operator()(first_argument_type __num,
428  second_argument_type __den) const noexcept
429  { return __num % __den; }
430  };
431 
432  /// Default ranged hash function H. In principle it should be a
433  /// function object composed from objects of type H1 and H2 such that
434  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
435  /// h1 and h2. So instead we'll just use a tag to tell class template
436  /// hashtable to do that composition.
437  struct _Default_ranged_hash { };
438 
439  /// Default value for rehash policy. Bucket size is (usually) the
440  /// smallest prime that keeps the load factor small enough.
441  struct _Prime_rehash_policy
442  {
443  using __has_load_factor = true_type;
444 
445  _Prime_rehash_policy(float __z = 1.0) noexcept
446  : _M_max_load_factor(__z), _M_next_resize(0) { }
447 
448  float
449  max_load_factor() const noexcept
450  { return _M_max_load_factor; }
451 
452  // Return a bucket size no smaller than n.
453  std::size_t
454  _M_next_bkt(std::size_t __n) const;
455 
456  // Return a bucket count appropriate for n elements
457  std::size_t
458  _M_bkt_for_elements(std::size_t __n) const
459  { return __builtin_ceil(__n / (double)_M_max_load_factor); }
460 
461  // __n_bkt is current bucket count, __n_elt is current element count,
462  // and __n_ins is number of elements to be inserted. Do we need to
463  // increase bucket count? If so, return make_pair(true, n), where n
464  // is the new bucket count. If not, return make_pair(false, 0).
465  std::pair<bool, std::size_t>
466  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
467  std::size_t __n_ins) const;
468 
469  typedef std::size_t _State;
470 
471  _State
472  _M_state() const
473  { return _M_next_resize; }
474 
475  void
476  _M_reset() noexcept
477  { _M_next_resize = 0; }
478 
479  void
480  _M_reset(_State __state)
481  { _M_next_resize = __state; }
482 
483  static const std::size_t _S_growth_factor = 2;
484 
485  float _M_max_load_factor;
486  mutable std::size_t _M_next_resize;
487  };
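The policy itself is internal, but its effect is observable: as elements are inserted the bucket count grows (to primes chosen by `_M_next_bkt` on libstdc++) so that the load factor never exceeds `max_load_factor()`. A short demonstration; the exact bucket counts printed are implementation details:

```cpp
#include <cassert>
#include <cstdio>
#include <unordered_set>

int main()
{
  std::unordered_set<int> s;               // uses _Prime_rehash_policy internally
  std::size_t buckets = s.bucket_count();
  for (int i = 0; i < 1000; ++i)
    {
      s.insert(i);
      // The invariant the rehash policy maintains:
      assert(s.load_factor() <= s.max_load_factor());
      if (s.bucket_count() != buckets)
        {
          buckets = s.bucket_count();
          std::printf("size %zu -> %zu buckets\n", s.size(), buckets);
        }
    }
}
```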
488 
489  /// Range hashing function assuming that second arg is a power of 2.
490  struct _Mask_range_hashing
491  {
492  typedef std::size_t first_argument_type;
493  typedef std::size_t second_argument_type;
494  typedef std::size_t result_type;
495 
496  result_type
497  operator()(first_argument_type __num,
498  second_argument_type __den) const noexcept
499  { return __num & (__den - 1); }
500  };
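Both range hashers fold a full-width hash code into `[0, N)`; `_Mask_range_hashing` exploits a power-of-two `N` so the division can be replaced by a bit-mask. A worked example of the two computations:

```cpp
#include <cassert>
#include <cstddef>

int main()
{
  const std::size_t hash = 0x9e3779b97f4a7c15ull;  // an arbitrary hash code

  // _Mod_range_hashing folds the code into [0, N) with a division:
  std::size_t mod_bucket = hash % 11;              // e.g. a prime bucket count

  // _Mask_range_hashing relies on N being a power of two:
  std::size_t mask_bucket = hash & (16 - 1);       // 16 is a power of two
  assert(mask_bucket == hash % 16);                // same result, no division
  assert(mod_bucket < 11 && mask_bucket < 16);
}
```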
501 
502  /// Compute closest power of 2 not less than __n
503  inline std::size_t
504  __clp2(std::size_t __n) noexcept
505  {
506  using __gnu_cxx::__int_traits;
507  // Equivalent to return __n ? std::bit_ceil(__n) : 0;
508  if (__n < 2)
509  return __n;
510  const unsigned __lz = sizeof(size_t) > sizeof(long)
511  ? __builtin_clzll(__n - 1ull)
512  : __builtin_clzl(__n - 1ul);
513  // Doing two shifts avoids undefined behaviour when __lz == 0.
514  return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
515  }
516 
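A few worked values for `__clp2`, using a stand-alone copy of the computation for illustration (with `__int_traits<size_t>::__digits` spelled as `sizeof(std::size_t) * CHAR_BIT`); this helper is what `_Power2_rehash_policy::_M_next_bkt` builds on:

```cpp
#include <cassert>
#include <climits>
#include <cstddef>

// Stand-alone copy of the computation above, for illustration only.
std::size_t clp2(std::size_t n) noexcept
{
  if (n < 2)
    return n;
  const unsigned lz = sizeof(std::size_t) > sizeof(long)
    ? __builtin_clzll(n - 1ull)
    : __builtin_clzl(n - 1ul);
  // Two shifts, as in the original, so the corner case lz == 0 wraps to 0
  // instead of invoking undefined behaviour.
  return (std::size_t(1) << (sizeof(std::size_t) * CHAR_BIT - lz - 1)) << 1;
}

int main()
{
  assert(clp2(0) == 0 && clp2(1) == 1); // small inputs returned unchanged
  assert(clp2(5) == 8);                 // 5 -> next power of two
  assert(clp2(8) == 8);                 // powers of two map to themselves
  assert(clp2(9) == 16);
}
```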
517  /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
518  /// operations.
519  struct _Power2_rehash_policy
520  {
521  using __has_load_factor = true_type;
522 
523  _Power2_rehash_policy(float __z = 1.0) noexcept
524  : _M_max_load_factor(__z), _M_next_resize(0) { }
525 
526  float
527  max_load_factor() const noexcept
528  { return _M_max_load_factor; }
529 
530  // Return a bucket size no smaller than n (as long as n is not above the
531  // highest power of 2).
532  std::size_t
533  _M_next_bkt(std::size_t __n) noexcept
534  {
535  if (__n == 0)
536  // Special case on container 1st initialization with 0 bucket count
537  // hint. We keep _M_next_resize to 0 to make sure that next time we
538  // want to add an element allocation will take place.
539  return 1;
540 
541  const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
542  const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
543  std::size_t __res = __clp2(__n);
544 
545  if (__res == 0)
546  __res = __max_bkt;
547  else if (__res == 1)
548  // If __res is 1 we force it to 2 to make sure there will be an
549  // allocation so that nothing need to be stored in the initial
550  // single bucket
551  __res = 2;
552 
553  if (__res == __max_bkt)
554  // Set next resize to the max value so that we never try to rehash again
555  // as we have already reached the biggest possible bucket number.
556  // Note that it might result in max_load_factor not being respected.
557  _M_next_resize = size_t(-1);
558  else
559  _M_next_resize
560  = __builtin_floor(__res * (double)_M_max_load_factor);
561 
562  return __res;
563  }
564 
565  // Return a bucket count appropriate for n elements
566  std::size_t
567  _M_bkt_for_elements(std::size_t __n) const noexcept
568  { return __builtin_ceil(__n / (double)_M_max_load_factor); }
569 
570  // __n_bkt is current bucket count, __n_elt is current element count,
571  // and __n_ins is number of elements to be inserted. Do we need to
572  // increase bucket count? If so, return make_pair(true, n), where n
573  // is the new bucket count. If not, return make_pair(false, 0).
574  std::pair<bool, std::size_t>
575  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
576  std::size_t __n_ins) noexcept
577  {
578  if (__n_elt + __n_ins > _M_next_resize)
579  {
580  // If _M_next_resize is 0 it means that we have nothing allocated so
581  // far and that we start inserting elements. In this case we start
582  // with an initial bucket size of 11.
583  double __min_bkts
584  = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
585  / (double)_M_max_load_factor;
586  if (__min_bkts >= __n_bkt)
587  return { true,
588  _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
589  __n_bkt * _S_growth_factor)) };
590 
591  _M_next_resize
592  = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
593  return { false, 0 };
594  }
595  else
596  return { false, 0 };
597  }
598 
599  typedef std::size_t _State;
600 
601  _State
602  _M_state() const noexcept
603  { return _M_next_resize; }
604 
605  void
606  _M_reset() noexcept
607  { _M_next_resize = 0; }
608 
609  void
610  _M_reset(_State __state) noexcept
611  { _M_next_resize = __state; }
612 
613  static const std::size_t _S_growth_factor = 2;
614 
615  float _M_max_load_factor;
616  std::size_t _M_next_resize;
617  };
618 
619  // Base classes for std::_Hashtable. We define these base classes
620  // because in some cases we want to do different things depending on
621  // the value of a policy class. In some cases the policy class
622  // affects which member functions and nested typedefs are defined;
623  // we handle that by specializing base class templates. Several of
624  // the base class templates need to access other members of class
625  // template _Hashtable, so we use a variant of the "Curiously
626  // Recurring Template Pattern" (CRTP) technique.
627 
628  /**
629  * Primary class template _Map_base.
630  *
631  * If the hashtable has a value type of the form pair<T1, T2> and a
632  * key extraction policy (_ExtractKey) that returns the first part
633  * of the pair, the hashtable gets a mapped_type typedef. If it
634  * satisfies those criteria and also has unique keys, then it also
635  * gets an operator[].
636  */
637  template<typename _Key, typename _Value, typename _Alloc,
638  typename _ExtractKey, typename _Equal,
639  typename _Hash, typename _RangeHash, typename _Unused,
640  typename _RehashPolicy, typename _Traits,
641  bool _Unique_keys = _Traits::__unique_keys::value>
642  struct _Map_base { };
643 
644  /// Partial specialization, __unique_keys set to false.
645  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
646  typename _Hash, typename _RangeHash, typename _Unused,
647  typename _RehashPolicy, typename _Traits>
648  struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
649  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
650  {
651  using mapped_type = typename std::tuple_element<1, _Pair>::type;
652  };
653 
654  /// Partial specialization, __unique_keys set to true.
655  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
656  typename _Hash, typename _RangeHash, typename _Unused,
657  typename _RehashPolicy, typename _Traits>
658  struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
659  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
660  {
661  private:
662  using __hashtable_base = _Hashtable_base<_Key, _Pair, _Select1st, _Equal,
663  _Hash, _RangeHash, _Unused,
664  _Traits>;
665 
666  using __hashtable = _Hashtable<_Key, _Pair, _Alloc, _Select1st, _Equal,
667  _Hash, _RangeHash,
668  _Unused, _RehashPolicy, _Traits>;
669 
670  using __hash_code = typename __hashtable_base::__hash_code;
671 
672  public:
673  using key_type = typename __hashtable_base::key_type;
674  using mapped_type = typename std::tuple_element<1, _Pair>::type;
675 
676  mapped_type&
677  operator[](const key_type& __k);
678 
679  mapped_type&
680  operator[](key_type&& __k);
681 
682  // _GLIBCXX_RESOLVE_LIB_DEFECTS
683  // DR 761. unordered_map needs an at() member function.
684  mapped_type&
685  at(const key_type& __k);
686 
687  const mapped_type&
688  at(const key_type& __k) const;
689  };
690 
691  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
692  typename _Hash, typename _RangeHash, typename _Unused,
693  typename _RehashPolicy, typename _Traits>
694  auto
695  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
696  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
697  operator[](const key_type& __k)
698  -> mapped_type&
699  {
700  __hashtable* __h = static_cast<__hashtable*>(this);
701  __hash_code __code = __h->_M_hash_code(__k);
702  std::size_t __bkt = __h->_M_bucket_index(__code);
703  if (auto __node = __h->_M_find_node(__bkt, __k, __code))
704  return __node->_M_v().second;
705 
706  typename __hashtable::_Scoped_node __node {
707  __h,
708  std::piecewise_construct,
709  std::tuple<const key_type&>(__k),
710  std::tuple<>()
711  };
712  auto __pos
713  = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
714  __node._M_node = nullptr;
715  return __pos->second;
716  }
717 
718  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
719  typename _Hash, typename _RangeHash, typename _Unused,
720  typename _RehashPolicy, typename _Traits>
721  auto
722  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
723  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
724  operator[](key_type&& __k)
725  -> mapped_type&
726  {
727  __hashtable* __h = static_cast<__hashtable*>(this);
728  __hash_code __code = __h->_M_hash_code(__k);
729  std::size_t __bkt = __h->_M_bucket_index(__code);
730  if (auto __node = __h->_M_find_node(__bkt, __k, __code))
731  return __node->_M_v().second;
732 
733  typename __hashtable::_Scoped_node __node {
734  __h,
735  std::piecewise_construct,
736  std::forward_as_tuple(std::move(__k)),
737  std::tuple<>()
738  };
739  auto __pos
740  = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
741  __node._M_node = nullptr;
742  return __pos->second;
743  }
744 
745  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
746  typename _Hash, typename _RangeHash, typename _Unused,
747  typename _RehashPolicy, typename _Traits>
748  auto
749  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
750  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
751  at(const key_type& __k)
752  -> mapped_type&
753  {
754  __hashtable* __h = static_cast<__hashtable*>(this);
755  auto __ite = __h->find(__k);
756 
757  if (!__ite._M_cur)
758  __throw_out_of_range(__N("_Map_base::at"));
759  return __ite->second;
760  }
761 
762  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
763  typename _Hash, typename _RangeHash, typename _Unused,
764  typename _RehashPolicy, typename _Traits>
765  auto
766  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
767  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
768  at(const key_type& __k) const
769  -> const mapped_type&
770  {
771  const __hashtable* __h = static_cast<const __hashtable*>(this);
772  auto __ite = __h->find(__k);
773 
774  if (!__ite._M_cur)
775  __throw_out_of_range(__N("_Map_base::at"));
776  return __ite->second;
777  }
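Seen from `std::unordered_map`, these members give `operator[]` its insert-if-absent behaviour (piecewise construction from the key and an empty tuple, as in the `_Scoped_node` above) and `at()` its throwing behaviour:

```cpp
#include <cassert>
#include <stdexcept>
#include <string>
#include <unordered_map>

int main()
{
  std::unordered_map<std::string, int> m;

  // operator[] inserts a value-initialized mapped_type when the key is absent.
  m["answer"];
  assert(m.at("answer") == 0);

  m["answer"] = 42;            // a second lookup finds the existing node
  assert(m.at("answer") == 42);

  // at() never inserts; a missing key reports std::out_of_range.
  try { m.at("missing"); assert(false); }
  catch (const std::out_of_range&) { }
}
```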
778 
779  /**
780  * Primary class template _Insert_base.
781  *
782  * Defines @c insert member functions appropriate to all _Hashtables.
783  */
784  template<typename _Key, typename _Value, typename _Alloc,
785  typename _ExtractKey, typename _Equal,
786  typename _Hash, typename _RangeHash, typename _Unused,
787  typename _RehashPolicy, typename _Traits>
788  struct _Insert_base
789  {
790  protected:
791  using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
792  _Equal, _Hash, _RangeHash,
793  _Unused, _Traits>;
794 
795  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
796  _Hash, _RangeHash,
797  _Unused, _RehashPolicy, _Traits>;
798 
799  using __hash_cached = typename _Traits::__hash_cached;
800  using __constant_iterators = typename _Traits::__constant_iterators;
801 
802  using __hashtable_alloc = _Hashtable_alloc<
803  __alloc_rebind<_Alloc, _Hash_node<_Value,
804  __hash_cached::value>>>;
805 
806  using value_type = typename __hashtable_base::value_type;
807  using size_type = typename __hashtable_base::size_type;
808 
809  using __unique_keys = typename _Traits::__unique_keys;
810  using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
811  using __node_gen_type = _AllocNode<__node_alloc_type>;
812 
813  __hashtable&
814  _M_conjure_hashtable()
815  { return *(static_cast<__hashtable*>(this)); }
816 
817  template<typename _InputIterator, typename _NodeGetter>
818  void
819  _M_insert_range(_InputIterator __first, _InputIterator __last,
820  const _NodeGetter&, true_type __uks);
821 
822  template<typename _InputIterator, typename _NodeGetter>
823  void
824  _M_insert_range(_InputIterator __first, _InputIterator __last,
825  const _NodeGetter&, false_type __uks);
826 
827  public:
828  using iterator = _Node_iterator<_Value, __constant_iterators::value,
829  __hash_cached::value>;
830 
831  using const_iterator = _Node_const_iterator<_Value, __constant_iterators::value,
832  __hash_cached::value>;
833 
834  using __ireturn_type = typename std::conditional<__unique_keys::value,
835  std::pair<iterator, bool>,
836  iterator>::type;
837 
838  __ireturn_type
839  insert(const value_type& __v)
840  {
841  __hashtable& __h = _M_conjure_hashtable();
842  __node_gen_type __node_gen(__h);
843  return __h._M_insert(__v, __node_gen, __unique_keys{});
844  }
845 
846  iterator
847  insert(const_iterator __hint, const value_type& __v)
848  {
849  __hashtable& __h = _M_conjure_hashtable();
850  __node_gen_type __node_gen(__h);
851  return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
852  }
853 
854  template<typename _KType, typename... _Args>
855  std::pair<iterator, bool>
856  try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
857  {
858  __hashtable& __h = _M_conjure_hashtable();
859  auto __code = __h._M_hash_code(__k);
860  std::size_t __bkt = __h._M_bucket_index(__code);
861  if (auto __node = __h._M_find_node(__bkt, __k, __code))
862  return { iterator(__node), false };
863 
864  typename __hashtable::_Scoped_node __node {
865  &__h,
866  std::piecewise_construct,
867  std::forward_as_tuple(std::forward<_KType>(__k)),
868  std::forward_as_tuple(std::forward<_Args>(__args)...)
869  };
870  auto __it
871  = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
872  __node._M_node = nullptr;
873  return { __it, true };
874  }
875 
876  void
877  insert(initializer_list<value_type> __l)
878  { this->insert(__l.begin(), __l.end()); }
879 
880  template<typename _InputIterator>
881  void
882  insert(_InputIterator __first, _InputIterator __last)
883  {
884  __hashtable& __h = _M_conjure_hashtable();
885  __node_gen_type __node_gen(__h);
886  return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
887  }
888  };
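`__ireturn_type` is why `insert` on the unique-key containers returns `pair<iterator, bool>` while the multi containers return a plain `iterator`:

```cpp
#include <cassert>
#include <unordered_set>

int main()
{
  // Unique keys: the bool reports whether an insertion actually happened.
  std::unordered_set<int> s;
  auto r1 = s.insert(1);
  auto r2 = s.insert(1);
  assert(r1.second && !r2.second && r1.first == r2.first);

  // Equivalent keys: every insert succeeds and returns an iterator.
  std::unordered_multiset<int> ms;
  ms.insert(1);
  ms.insert(1);
  assert(ms.count(1) == 2);
}
```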
889 
890  template<typename _Key, typename _Value, typename _Alloc,
891  typename _ExtractKey, typename _Equal,
892  typename _Hash, typename _RangeHash, typename _Unused,
893  typename _RehashPolicy, typename _Traits>
894  template<typename _InputIterator, typename _NodeGetter>
895  void
896  _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
897  _Hash, _RangeHash, _Unused,
898  _RehashPolicy, _Traits>::
899  _M_insert_range(_InputIterator __first, _InputIterator __last,
900  const _NodeGetter& __node_gen, true_type __uks)
901  {
902  __hashtable& __h = _M_conjure_hashtable();
903  for (; __first != __last; ++__first)
904  __h._M_insert(*__first, __node_gen, __uks);
905  }
906 
907  template<typename _Key, typename _Value, typename _Alloc,
908  typename _ExtractKey, typename _Equal,
909  typename _Hash, typename _RangeHash, typename _Unused,
910  typename _RehashPolicy, typename _Traits>
911  template<typename _InputIterator, typename _NodeGetter>
912  void
913  _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
914  _Hash, _RangeHash, _Unused,
915  _RehashPolicy, _Traits>::
916  _M_insert_range(_InputIterator __first, _InputIterator __last,
917  const _NodeGetter& __node_gen, false_type __uks)
918  {
919  using __rehash_type = typename __hashtable::__rehash_type;
920  using __rehash_state = typename __hashtable::__rehash_state;
921  using pair_type = std::pair<bool, std::size_t>;
922 
923  size_type __n_elt = __detail::__distance_fw(__first, __last);
924  if (__n_elt == 0)
925  return;
926 
927  __hashtable& __h = _M_conjure_hashtable();
928  __rehash_type& __rehash = __h._M_rehash_policy;
929  const __rehash_state& __saved_state = __rehash._M_state();
930  pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
931  __h._M_element_count,
932  __n_elt);
933 
934  if (__do_rehash.first)
935  __h._M_rehash(__do_rehash.second, __saved_state);
936 
937  for (; __first != __last; ++__first)
938  __h._M_insert(*__first, __node_gen, __uks);
939  }
940 
941  /**
942  * Primary class template _Insert.
943  *
944  * Defines @c insert member functions that depend on _Hashtable policies,
945  * via partial specializations.
946  */
947  template<typename _Key, typename _Value, typename _Alloc,
948  typename _ExtractKey, typename _Equal,
949  typename _Hash, typename _RangeHash, typename _Unused,
950  typename _RehashPolicy, typename _Traits,
951  bool _Constant_iterators = _Traits::__constant_iterators::value>
952  struct _Insert;
953 
954  /// Specialization.
955  template<typename _Key, typename _Value, typename _Alloc,
956  typename _ExtractKey, typename _Equal,
957  typename _Hash, typename _RangeHash, typename _Unused,
958  typename _RehashPolicy, typename _Traits>
959  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
960  _Hash, _RangeHash, _Unused,
961  _RehashPolicy, _Traits, true>
962  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
963  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
964  {
965  using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
966  _Equal, _Hash, _RangeHash, _Unused,
967  _RehashPolicy, _Traits>;
968 
969  using value_type = typename __base_type::value_type;
970  using iterator = typename __base_type::iterator;
971  using const_iterator = typename __base_type::const_iterator;
972  using __ireturn_type = typename __base_type::__ireturn_type;
973 
974  using __unique_keys = typename __base_type::__unique_keys;
975  using __hashtable = typename __base_type::__hashtable;
976  using __node_gen_type = typename __base_type::__node_gen_type;
977 
978  using __base_type::insert;
979 
980  __ireturn_type
981  insert(value_type&& __v)
982  {
983  __hashtable& __h = this->_M_conjure_hashtable();
984  __node_gen_type __node_gen(__h);
985  return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
986  }
987 
988  iterator
989  insert(const_iterator __hint, value_type&& __v)
990  {
991  __hashtable& __h = this->_M_conjure_hashtable();
992  __node_gen_type __node_gen(__h);
993  return __h._M_insert(__hint, std::move(__v), __node_gen,
994  __unique_keys{});
995  }
996  };
997 
998  /// Specialization.
999  template<typename _Key, typename _Value, typename _Alloc,
1000  typename _ExtractKey, typename _Equal,
1001  typename _Hash, typename _RangeHash, typename _Unused,
1002  typename _RehashPolicy, typename _Traits>
1003  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1004  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1005  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1006  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1007  {
1008  using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1009  _Equal, _Hash, _RangeHash, _Unused,
1010  _RehashPolicy, _Traits>;
1011  using value_type = typename __base_type::value_type;
1012  using iterator = typename __base_type::iterator;
1013  using const_iterator = typename __base_type::const_iterator;
1014 
1015  using __unique_keys = typename __base_type::__unique_keys;
1016  using __hashtable = typename __base_type::__hashtable;
1017  using __ireturn_type = typename __base_type::__ireturn_type;
1018 
1019  using __base_type::insert;
1020 
1021  template<typename _Pair>
1022  using __is_cons = std::is_constructible<value_type, _Pair&&>;
1023 
1024  template<typename _Pair>
1025  using _IFcons = std::enable_if<__is_cons<_Pair>::value>;
1026 
1027  template<typename _Pair>
1028  using _IFconsp = typename _IFcons<_Pair>::type;
1029 
1030  template<typename _Pair, typename = _IFconsp<_Pair>>
1031  __ireturn_type
1032  insert(_Pair&& __v)
1033  {
1034  __hashtable& __h = this->_M_conjure_hashtable();
1035  return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
1036  }
1037 
1038  template<typename _Pair, typename = _IFconsp<_Pair>>
1039  iterator
1040  insert(const_iterator __hint, _Pair&& __v)
1041  {
1042  __hashtable& __h = this->_M_conjure_hashtable();
1043  return __h._M_emplace(__hint, __unique_keys{},
1044  std::forward<_Pair>(__v));
1045  }
1046  };
1047 
1048  template<typename _Policy>
1049  using __has_load_factor = typename _Policy::__has_load_factor;
1050 
1051  /**
1052  * Primary class template _Rehash_base.
1053  *
1054  * Give hashtable the max_load_factor functions and reserve iff the
1055  * rehash policy supports it.
1056  */
1057  template<typename _Key, typename _Value, typename _Alloc,
1058  typename _ExtractKey, typename _Equal,
1059  typename _Hash, typename _RangeHash, typename _Unused,
1060  typename _RehashPolicy, typename _Traits,
1061  typename =
1062  __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
1063  struct _Rehash_base;
1064 
1065  /// Specialization when rehash policy doesn't provide load factor management.
1066  template<typename _Key, typename _Value, typename _Alloc,
1067  typename _ExtractKey, typename _Equal,
1068  typename _Hash, typename _RangeHash, typename _Unused,
1069  typename _RehashPolicy, typename _Traits>
1070  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1071  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1072  false_type /* Has load factor */>
1073  {
1074  };
1075 
1076  /// Specialization when the rehash policy provides load factor management.
1077  template<typename _Key, typename _Value, typename _Alloc,
1078  typename _ExtractKey, typename _Equal,
1079  typename _Hash, typename _RangeHash, typename _Unused,
1080  typename _RehashPolicy, typename _Traits>
1081  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1082  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1083  true_type /* Has load factor */>
1084  {
1085  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
1086  _Equal, _Hash, _RangeHash, _Unused,
1087  _RehashPolicy, _Traits>;
1088 
1089  float
1090  max_load_factor() const noexcept
1091  {
1092  const __hashtable* __this = static_cast<const __hashtable*>(this);
1093  return __this->__rehash_policy().max_load_factor();
1094  }
1095 
1096  void
1097  max_load_factor(float __z)
1098  {
1099  __hashtable* __this = static_cast<__hashtable*>(this);
1100  __this->__rehash_policy(_RehashPolicy(__z));
1101  }
1102 
1103  void
1104  reserve(std::size_t __n)
1105  {
1106  __hashtable* __this = static_cast<__hashtable*>(this);
1107  __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
1108  }
1109  };
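With a load-factor-aware policy the container exposes `max_load_factor(float)` and `reserve(n)`; `reserve` provisions enough buckets that the requested number of elements fits without rehashing. A short demonstration (the no-rehash check reflects the documented intent of `reserve`):

```cpp
#include <cassert>
#include <unordered_set>

int main()
{
  std::unordered_set<int> s;
  s.max_load_factor(0.5f);   // forwarded to the rehash policy via max_load_factor(float)
  s.reserve(100);            // rehash(_M_bkt_for_elements(100)) == rehash(200)

  // Post-condition of rehash(n): bucket_count() >= n.
  assert(s.bucket_count() >= 200);

  // Enough buckets were provisioned that 100 insertions need no rehash.
  auto before = s.bucket_count();
  for (int i = 0; i < 100; ++i)
    s.insert(i);
  assert(s.bucket_count() == before);
}
```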
1110 
1111  /**
1112  * Primary class template _Hashtable_ebo_helper.
1113  *
1114  * Helper class using EBO when it is not forbidden (the type is not
1115  * final) and when it is worth it (the type is empty.)
1116  */
1117  template<int _Nm, typename _Tp,
1118  bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
1119  struct _Hashtable_ebo_helper;
1120 
1121  /// Specialization using EBO.
1122  template<int _Nm, typename _Tp>
1123  struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1124  : private _Tp
1125  {
1126  _Hashtable_ebo_helper() = default;
1127 
1128  template<typename _OtherTp>
1129  _Hashtable_ebo_helper(_OtherTp&& __tp)
1130  : _Tp(std::forward<_OtherTp>(__tp))
1131  { }
1132 
1133  const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1134  _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1135  };
1136 
1137  /// Specialization not using EBO.
1138  template<int _Nm, typename _Tp>
1139  struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1140  {
1141  _Hashtable_ebo_helper() = default;
1142 
1143  template<typename _OtherTp>
1144  _Hashtable_ebo_helper(_OtherTp&& __tp)
1145  : _M_tp(std::forward<_OtherTp>(__tp))
1146  { }
1147 
1148  const _Tp& _M_cget() const { return _M_tp; }
1149  _Tp& _M_get() { return _M_tp; }
1150 
1151  private:
1152  _Tp _M_tp;
1153  };
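What the helper buys: an empty hasher or equality functor folded in as a base class occupies no storage, whereas storing it as a member costs at least one byte plus padding. A classic illustration (the printed sizes are typical, not mandated):

```cpp
#include <cstdio>
#include <functional>

// Deriving from the empty functor: the empty base optimization applies.
struct derives_from_hash : private std::hash<int>
{
  int* p;
};

// Holding the functor as a member: it must occupy addressable storage.
struct holds_hash
{
  std::hash<int> h;
  int* p;
};

int main()
{
  std::printf("EBO:    %zu bytes\n", sizeof(derives_from_hash)); // typically sizeof(int*)
  std::printf("member: %zu bytes\n", sizeof(holds_hash));        // typically 2 * sizeof(int*)
}
```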
1154 
1155  /**
1156  * Primary class template _Local_iterator_base.
1157  *
1158  * Base class for local iterators, used to iterate within a bucket
1159  * but not between buckets.
1160  */
1161  template<typename _Key, typename _Value, typename _ExtractKey,
1162  typename _Hash, typename _RangeHash, typename _Unused,
1163  bool __cache_hash_code>
1164  struct _Local_iterator_base;
1165 
1166  /**
1167  * Primary class template _Hash_code_base.
1168  *
1169  * Encapsulates two policy issues that aren't quite orthogonal.
1170  * (1) the difference between using a ranged hash function and using
1171  * the combination of a hash function and a range-hashing function.
1172  * In the former case we don't have such things as hash codes, so
1173  * we have a dummy type as placeholder.
1174  * (2) Whether or not we cache hash codes. Caching hash codes is
1175  * meaningless if we have a ranged hash function.
1176  *
1177  * We also put the key extraction objects here, for convenience.
1178  * Each specialization derives from one or more of the template
1179  * parameters to benefit from Ebo. This is important as this type
1180  * is inherited in some cases by the _Local_iterator_base type used
1181  * to implement local_iterator and const_local_iterator. As with
1182  * any iterator type we prefer to make it as small as possible.
1183  */
1184  template<typename _Key, typename _Value, typename _ExtractKey,
1185  typename _Hash, typename _RangeHash, typename _Unused,
1186  bool __cache_hash_code>
1187  struct _Hash_code_base
1188  : private _Hashtable_ebo_helper<1, _Hash>
1189  {
1190  private:
1191  using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;
1192 
1193  // Gives the local iterator implementation access to _M_bucket_index().
1194  friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1195  _Hash, _RangeHash, _Unused, false>;
1196 
1197  public:
1198  typedef _Hash hasher;
1199 
1200  hasher
1201  hash_function() const
1202  { return _M_hash(); }
1203 
1204  protected:
1205  typedef std::size_t __hash_code;
1206 
1207  // We need the default constructor for the local iterators and _Hashtable
1208  // default constructor.
1209  _Hash_code_base() = default;
1210  _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }
1211 
1212  __hash_code
1213  _M_hash_code(const _Key& __k) const
1214  {
1215  static_assert(__is_invocable<const _Hash&, const _Key&>{},
1216  "hash function must be invocable with an argument of key type");
1217  return _M_hash()(__k);
1218  }
1219 
1220  template<typename _Kt>
1221  __hash_code
1222  _M_hash_code_tr(const _Kt& __k) const
1223  {
1224  static_assert(__is_invocable<const _Hash&, const _Kt&>{},
1225  "hash function must be invocable with an argument of key type");
1226  return _M_hash()(__k);
1227  }
1228 
1229  std::size_t
1230  _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
1231  { return _RangeHash{}(__c, __bkt_count); }
1232 
1233  std::size_t
1234  _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
1235  std::size_t __bkt_count) const
1236  noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
1237  && noexcept(declval<const _RangeHash&>()((__hash_code)0,
1238  (std::size_t)0)) )
1239  {
1240  return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
1241  __bkt_count);
1242  }
1243 
1244  std::size_t
1245  _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
1246  std::size_t __bkt_count) const
1247  noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
1248  (std::size_t)0)) )
1249  { return _RangeHash{}(__n._M_hash_code, __bkt_count); }
1250 
1251  void
1252  _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
1253  { }
1254 
1255  void
1256  _M_copy_code(_Hash_node_code_cache<false>&,
1257  const _Hash_node_code_cache<false>&) const
1258  { }
1259 
1260  void
1261  _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
1262  { __n._M_hash_code = __c; }
1263 
1264  void
1265  _M_copy_code(_Hash_node_code_cache<true>& __to,
1266  const _Hash_node_code_cache<true>& __from) const
1267  { __to._M_hash_code = __from._M_hash_code; }
1268 
1269  void
1270  _M_swap(_Hash_code_base& __x)
1271  { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }
1272 
1273  const _Hash&
1274  _M_hash() const { return __ebo_hash::_M_cget(); }
1275  };
1276 
1277  /// Partial specialization used when nodes contain a cached hash code.
1278  template<typename _Key, typename _Value, typename _ExtractKey,
1279  typename _Hash, typename _RangeHash, typename _Unused>
1280  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1281  _Hash, _RangeHash, _Unused, true>
1282  : public _Node_iterator_base<_Value, true>
1283  {
1284  protected:
1285  using __base_node_iter = _Node_iterator_base<_Value, true>;
1286  using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1287  _Hash, _RangeHash, _Unused, true>;
1288 
1289  _Local_iterator_base() = default;
1290  _Local_iterator_base(const __hash_code_base&,
1291  _Hash_node<_Value, true>* __p,
1292  std::size_t __bkt, std::size_t __bkt_count)
1293  : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1294  { }
1295 
1296  void
1297  _M_incr()
1298  {
1299  __base_node_iter::_M_incr();
1300  if (this->_M_cur)
1301  {
1302  std::size_t __bkt
1303  = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
1304  if (__bkt != _M_bucket)
1305  this->_M_cur = nullptr;
1306  }
1307  }
1308 
1309  std::size_t _M_bucket;
1310  std::size_t _M_bucket_count;
1311 
1312  public:
1313  std::size_t
1314  _M_get_bucket() const { return _M_bucket; } // for debug mode
1315  };
1316 
1317  // Uninitialized storage for a _Hash_code_base.
1318  // This type is DefaultConstructible and Assignable even if the
1319  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
1320  // can be DefaultConstructible and Assignable.
1321  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
1322  struct _Hash_code_storage
1323  {
1324  __gnu_cxx::__aligned_buffer<_Tp> _M_storage;
1325 
1326  _Tp*
1327  _M_h() { return _M_storage._M_ptr(); }
1328 
1329  const _Tp*
1330  _M_h() const { return _M_storage._M_ptr(); }
1331  };
1332 
1333  // Empty partial specialization for empty _Hash_code_base types.
1334  template<typename _Tp>
1335  struct _Hash_code_storage<_Tp, true>
1336  {
1337  static_assert( std::is_empty<_Tp>::value, "Type must be empty" );
1338 
1339  // As _Tp is an empty type there will be no bytes written/read through
1340  // the cast pointer, so no strict-aliasing violation.
1341  _Tp*
1342  _M_h() { return reinterpret_cast<_Tp*>(this); }
1343 
1344  const _Tp*
1345  _M_h() const { return reinterpret_cast<const _Tp*>(this); }
1346  };
1347 
1348  template<typename _Key, typename _Value, typename _ExtractKey,
1349  typename _Hash, typename _RangeHash, typename _Unused>
1350  using __hash_code_for_local_iter
1351  = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
1352  _Hash, _RangeHash, _Unused, false>>;
1353 
1354  // Partial specialization used when hash codes are not cached
1355  template<typename _Key, typename _Value, typename _ExtractKey,
1356  typename _Hash, typename _RangeHash, typename _Unused>
1357  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1358  _Hash, _RangeHash, _Unused, false>
1359  : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1360  _Unused>
1361  , _Node_iterator_base<_Value, false>
1362  {
1363  protected:
1364  using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1365  _Hash, _RangeHash, _Unused, false>;
1366  using __node_iter_base = _Node_iterator_base<_Value, false>;
1367 
1368  _Local_iterator_base() : _M_bucket_count(-1) { }
1369 
1370  _Local_iterator_base(const __hash_code_base& __base,
1371  _Hash_node<_Value, false>* __p,
1372  std::size_t __bkt, std::size_t __bkt_count)
1373  : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1374  { _M_init(__base); }
1375 
1376  ~_Local_iterator_base()
1377  {
1378  if (_M_bucket_count != size_t(-1))
1379  _M_destroy();
1380  }
1381 
1382  _Local_iterator_base(const _Local_iterator_base& __iter)
1383  : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1384  , _M_bucket_count(__iter._M_bucket_count)
1385  {
1386  if (_M_bucket_count != size_t(-1))
1387  _M_init(*__iter._M_h());
1388  }
1389 
1390  _Local_iterator_base&
1391  operator=(const _Local_iterator_base& __iter)
1392  {
1393  if (_M_bucket_count != -1)
1394  _M_destroy();
1395  this->_M_cur = __iter._M_cur;
1396  _M_bucket = __iter._M_bucket;
1397  _M_bucket_count = __iter._M_bucket_count;
1398  if (_M_bucket_count != -1)
1399  _M_init(*__iter._M_h());
1400  return *this;
1401  }
1402 
1403  void
1404  _M_incr()
1405  {
1406  __node_iter_base::_M_incr();
1407  if (this->_M_cur)
1408  {
1409  std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1410  _M_bucket_count);
1411  if (__bkt != _M_bucket)
1412  this->_M_cur = nullptr;
1413  }
1414  }
1415 
1416  std::size_t _M_bucket;
1417  std::size_t _M_bucket_count;
1418 
1419  void
1420  _M_init(const __hash_code_base& __base)
1421  { ::new(this->_M_h()) __hash_code_base(__base); }
1422 
1423  void
1424  _M_destroy() { this->_M_h()->~__hash_code_base(); }
1425 
1426  public:
1427  std::size_t
1428  _M_get_bucket() const { return _M_bucket; } // for debug mode
1429  };
1430 
1431  /// local iterators
1432  template<typename _Key, typename _Value, typename _ExtractKey,
1433  typename _Hash, typename _RangeHash, typename _Unused,
1434  bool __constant_iterators, bool __cache>
1435  struct _Local_iterator
1436  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1437  _Hash, _RangeHash, _Unused, __cache>
1438  {
1439  private:
1440  using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1441  _Hash, _RangeHash, _Unused, __cache>;
1442  using __hash_code_base = typename __base_type::__hash_code_base;
1443 
1444  public:
1445  typedef _Value value_type;
1446  typedef typename std::conditional<__constant_iterators,
1447  const value_type*, value_type*>::type
1448  pointer;
1449  typedef typename std::conditional<__constant_iterators,
1450  const value_type&, value_type&>::type
1451  reference;
1452  typedef std::ptrdiff_t difference_type;
1453  typedef std::forward_iterator_tag iterator_category;
1454 
1455  _Local_iterator() = default;
1456 
1457  _Local_iterator(const __hash_code_base& __base,
1458  _Hash_node<_Value, __cache>* __n,
1459  std::size_t __bkt, std::size_t __bkt_count)
1460  : __base_type(__base, __n, __bkt, __bkt_count)
1461  { }
1462 
1463  reference
1464  operator*() const
1465  { return this->_M_cur->_M_v(); }
1466 
1467  pointer
1468  operator->() const
1469  { return this->_M_cur->_M_valptr(); }
1470 
1471  _Local_iterator&
1472  operator++()
1473  {
1474  this->_M_incr();
1475  return *this;
1476  }
1477 
1478  _Local_iterator
1479  operator++(int)
1480  {
1481  _Local_iterator __tmp(*this);
1482  this->_M_incr();
1483  return __tmp;
1484  }
1485  };
1486 
1487  /// local const_iterators
1488  template<typename _Key, typename _Value, typename _ExtractKey,
1489  typename _Hash, typename _RangeHash, typename _Unused,
1490  bool __constant_iterators, bool __cache>
1491  struct _Local_const_iterator
1492  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1493  _Hash, _RangeHash, _Unused, __cache>
1494  {
1495  private:
1496  using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1497  _Hash, _RangeHash, _Unused, __cache>;
1498  using __hash_code_base = typename __base_type::__hash_code_base;
1499 
1500  public:
1501  typedef _Value value_type;
1502  typedef const value_type* pointer;
1503  typedef const value_type& reference;
1504  typedef std::ptrdiff_t difference_type;
1505  typedef std::forward_iterator_tag iterator_category;
1506 
1507  _Local_const_iterator() = default;
1508 
1509  _Local_const_iterator(const __hash_code_base& __base,
1510  _Hash_node<_Value, __cache>* __n,
1511  std::size_t __bkt, std::size_t __bkt_count)
1512  : __base_type(__base, __n, __bkt, __bkt_count)
1513  { }
1514 
1515  _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
1516  _Hash, _RangeHash, _Unused,
1517  __constant_iterators,
1518  __cache>& __x)
1519  : __base_type(__x)
1520  { }
1521 
1522  reference
1523  operator*() const
1524  { return this->_M_cur->_M_v(); }
1525 
1526  pointer
1527  operator->() const
1528  { return this->_M_cur->_M_valptr(); }
1529 
1530  _Local_const_iterator&
1531  operator++()
1532  {
1533  this->_M_incr();
1534  return *this;
1535  }
1536 
1537  _Local_const_iterator
1538  operator++(int)
1539  {
1540  _Local_const_iterator __tmp(*this);
1541  this->_M_incr();
1542  return __tmp;
1543  }
1544  };
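These local iterators back the containers' `begin(n)`/`end(n)` bucket interface: they walk a single bucket of the one global node list, stopping as soon as the next node hashes to a different bucket. For example:

```cpp
#include <cstddef>
#include <cstdio>
#include <unordered_set>

int main()
{
  std::unordered_set<int> s{1, 2, 3, 4, 5};

  // begin(b)/end(b) return the local iterators defined above.
  for (std::size_t b = 0; b < s.bucket_count(); ++b)
    {
      if (s.bucket_size(b) == 0)
        continue;
      std::printf("bucket %zu:", b);
      for (auto it = s.begin(b); it != s.end(b); ++it)
        std::printf(" %d", *it);
      std::printf("\n");
    }
}
```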
1545 
1546  /**
1547  * Primary class template _Hashtable_base.
1548  *
1549  * Helper class adding management of _Equal functor to
1550  * _Hash_code_base type.
1551  *
1552  * Base class templates are:
1553  * - __detail::_Hash_code_base
1554  * - __detail::_Hashtable_ebo_helper
1555  */
1556  template<typename _Key, typename _Value, typename _ExtractKey,
1557  typename _Equal, typename _Hash, typename _RangeHash,
1558  typename _Unused, typename _Traits>
1559  struct _Hashtable_base
1560  : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1561  _Unused, _Traits::__hash_cached::value>,
1562  private _Hashtable_ebo_helper<0, _Equal>
1563  {
1564  public:
1565  typedef _Key key_type;
1566  typedef _Value value_type;
1567  typedef _Equal key_equal;
1568  typedef std::size_t size_type;
1569  typedef std::ptrdiff_t difference_type;
1570 
1571  using __traits_type = _Traits;
1572  using __hash_cached = typename __traits_type::__hash_cached;
1573 
1574  using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1575  _Hash, _RangeHash, _Unused,
1576  __hash_cached::value>;
1577 
1578  using __hash_code = typename __hash_code_base::__hash_code;
1579 
1580  private:
1581  using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;
1582 
1583  static bool
1584  _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
1585  { return true; }
1586 
1587  static bool
1588  _S_node_equals(const _Hash_node_code_cache<false>&,
1589  const _Hash_node_code_cache<false>&)
1590  { return true; }
1591 
1592  static bool
1593  _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
1594  { return __c == __n._M_hash_code; }
1595 
1596  static bool
1597  _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
1598  const _Hash_node_code_cache<true>& __rhn)
1599  { return __lhn._M_hash_code == __rhn._M_hash_code; }
1600 
1601  protected:
1602  _Hashtable_base() = default;
1603  _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
1604  : __hash_code_base(__hash), _EqualEBO(__eq)
1605  { }
1606 
1607  bool
1608  _M_equals(const _Key& __k, __hash_code __c,
1609  const _Hash_node_value<_Value, __hash_cached::value>& __n) const
1610  {
1611  static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
1612  "key equality predicate must be invocable with two arguments of "
1613  "key type");
1614  return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1615  }
1616 
1617  template<typename _Kt>
1618  bool
1619  _M_equals_tr(const _Kt& __k, __hash_code __c,
1620  const _Hash_node_value<_Value,
1621  __hash_cached::value>& __n) const
1622  {
1623  static_assert(
1624  __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
1625  "key equality predicate must be invocable with two arguments of "
1626  "key type");
1627  return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1628  }
1629 
1630  bool
1631  _M_node_equals(
1632  const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
1633  const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
1634  {
1635  return _S_node_equals(__lhn, __rhn)
1636  && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
1637  }
1638 
1639  void
1640  _M_swap(_Hashtable_base& __x)
1641  {
1642  __hash_code_base::_M_swap(__x);
1643  std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
1644  }
1645 
1646  const _Equal&
1647  _M_eq() const { return _EqualEBO::_M_cget(); }
1648  };
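The `_M_hash_code_tr` and `_M_equals_tr` members support heterogeneous ("transparent") lookup, which the containers expose in C++20 when both the hasher and the key-equality predicate define `is_transparent`. A sketch, assuming `-std=c++20`; `string_hash` is an illustrative transparent hasher, not part of the library:

```cpp
#include <cassert>
#include <functional>
#include <string>
#include <string_view>
#include <unordered_set>

struct string_hash
{
  using is_transparent = void;                     // opt in to transparent lookup
  std::size_t operator()(std::string_view sv) const
  { return std::hash<std::string_view>{}(sv); }    // consistent for string and string_view
};

int main()
{
  std::unordered_set<std::string, string_hash, std::equal_to<>> s{"key"};

  // find() accepts a string_view directly; no temporary std::string is built.
  assert(s.find(std::string_view("key")) != s.end());
}
```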
1649 
1650  /**
1651  * Primary class template _Equality.
1652  *
1653  * This is for implementing equality comparison for unordered
1654  * containers, per N3068, by John Lakos and Pablo Halpern.
1655  * Algorithmically, we follow closely the reference implementations
1656  * therein.
1657  */
1658  template<typename _Key, typename _Value, typename _Alloc,
1659  typename _ExtractKey, typename _Equal,
1660  typename _Hash, typename _RangeHash, typename _Unused,
1661  typename _RehashPolicy, typename _Traits,
1662  bool _Unique_keys = _Traits::__unique_keys::value>
1663  struct _Equality;
1664 
1665  /// unordered_map and unordered_set specializations.
1666  template<typename _Key, typename _Value, typename _Alloc,
1667  typename _ExtractKey, typename _Equal,
1668  typename _Hash, typename _RangeHash, typename _Unused,
1669  typename _RehashPolicy, typename _Traits>
1670  struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1671  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
1672  {
1673  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1674  _Hash, _RangeHash, _Unused,
1675  _RehashPolicy, _Traits>;
1676 
1677  bool
1678  _M_equal(const __hashtable&) const;
1679  };
1680 
1681  template<typename _Key, typename _Value, typename _Alloc,
1682  typename _ExtractKey, typename _Equal,
1683  typename _Hash, typename _RangeHash, typename _Unused,
1684  typename _RehashPolicy, typename _Traits>
1685  bool
1686  _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1687  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
1688  _M_equal(const __hashtable& __other) const
1689  {
1690  using __node_type = typename __hashtable::__node_type;
1691  const __hashtable* __this = static_cast<const __hashtable*>(this);
1692  if (__this->size() != __other.size())
1693  return false;
1694 
1695  for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
1696  {
1697  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
1698  auto __prev_n = __other._M_buckets[__ybkt];
1699  if (!__prev_n)
1700  return false;
1701 
1702  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
1703  __n = __n->_M_next())
1704  {
1705  if (__n->_M_v() == *__itx)
1706  break;
1707 
1708  if (!__n->_M_nxt
1709  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
1710  return false;
1711  }
1712  }
1713 
1714  return true;
1715  }
1716 
1717  /// unordered_multiset and unordered_multimap specializations.
1718  template<typename _Key, typename _Value, typename _Alloc,
1719  typename _ExtractKey, typename _Equal,
1720  typename _Hash, typename _RangeHash, typename _Unused,
1721  typename _RehashPolicy, typename _Traits>
1722  struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1723  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1724  {
1725  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1726  _Hash, _RangeHash, _Unused,
1727  _RehashPolicy, _Traits>;
1728 
1729  bool
1730  _M_equal(const __hashtable&) const;
1731  };
1732 
1733  template<typename _Key, typename _Value, typename _Alloc,
1734  typename _ExtractKey, typename _Equal,
1735  typename _Hash, typename _RangeHash, typename _Unused,
1736  typename _RehashPolicy, typename _Traits>
1737  bool
1738  _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1739  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
1740  _M_equal(const __hashtable& __other) const
1741  {
1742  using __node_type = typename __hashtable::__node_type;
1743  const __hashtable* __this = static_cast<const __hashtable*>(this);
1744  if (__this->size() != __other.size())
1745  return false;
1746 
1747  for (auto __itx = __this->begin(); __itx != __this->end();)
1748  {
1749  std::size_t __x_count = 1;
1750  auto __itx_end = __itx;
1751  for (++__itx_end; __itx_end != __this->end()
1752  && __this->key_eq()(_ExtractKey{}(*__itx),
1753  _ExtractKey{}(*__itx_end));
1754  ++__itx_end)
1755  ++__x_count;
1756 
1757  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
1758  auto __y_prev_n = __other._M_buckets[__ybkt];
1759  if (!__y_prev_n)
1760  return false;
1761 
1762  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
1763  for (;;)
1764  {
1765  if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
1766  _ExtractKey{}(*__itx)))
1767  break;
1768 
1769  auto __y_ref_n = __y_n;
1770  for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
1771  if (!__other._M_node_equals(*__y_ref_n, *__y_n))
1772  break;
1773 
1774  if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
1775  return false;
1776  }
1777 
1778  typename __hashtable::const_iterator __ity(__y_n);
1779  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
1780  if (--__x_count == 0)
1781  break;
1782 
1783  if (__x_count != 0)
1784  return false;
1785 
1786  if (!std::is_permutation(__itx, __itx_end, __ity))
1787  return false;
1788 
1789  __itx = __itx_end;
1790  }
1791  return true;
1792  }
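  // The non-unique-key _M_equal above backs operator== for
  // std::unordered_multiset and std::unordered_multimap: each run of
  // equivalent keys in *this is located in __other and the two groups are
  // compared with std::is_permutation, so duplicate counts have to match.
  // An illustrative sketch (not part of this header; the function name is
  // made up), using only the public <unordered_set> interface:
  //
  //   #include <cassert>
  //   #include <unordered_set>
  //
  //   void example_multi_key_equality()
  //   {
  //     std::unordered_multiset<int> a{1, 2, 2, 3};
  //     std::unordered_multiset<int> b{2, 3, 2, 1};
  //     assert(a == b);       // same values with the same multiplicities
  //     b.insert(2);
  //     assert(!(a == b));    // multiplicity of 2 now differs
  //   }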
1793 
1794  /**
1795  * This type handles all node and bucket allocation.  It keeps its allocator
1796  * instance as a base class so that EBO applies when the allocator is empty.
1797  */
1798  template<typename _NodeAlloc>
1799  struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
1800  {
1801  private:
1802  using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
1803  public:
1804  using __node_type = typename _NodeAlloc::value_type;
1805  using __node_alloc_type = _NodeAlloc;
1806  // Use __gnu_cxx to benefit from _S_always_equal and the like.
1807  using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
1808 
1809  using __value_alloc_traits = typename __node_alloc_traits::template
1810  rebind_traits<typename __node_type::value_type>;
1811 
1812  using __node_ptr = __node_type*;
1813  using __node_base = _Hash_node_base;
1814  using __node_base_ptr = __node_base*;
1815  using __buckets_alloc_type =
1816  __alloc_rebind<__node_alloc_type, __node_base_ptr>;
1817  using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
1818  using __buckets_ptr = __node_base_ptr*;
1819 
1820  _Hashtable_alloc() = default;
1821  _Hashtable_alloc(const _Hashtable_alloc&) = default;
1822  _Hashtable_alloc(_Hashtable_alloc&&) = default;
1823 
1824  template<typename _Alloc>
1825  _Hashtable_alloc(_Alloc&& __a)
1826  : __ebo_node_alloc(std::forward<_Alloc>(__a))
1827  { }
1828 
1829  __node_alloc_type&
1830  _M_node_allocator()
1831  { return __ebo_node_alloc::_M_get(); }
1832 
1833  const __node_alloc_type&
1834  _M_node_allocator() const
1835  { return __ebo_node_alloc::_M_cget(); }
1836 
1837  // Allocate a node and construct an element within it.
1838  template<typename... _Args>
1839  __node_ptr
1840  _M_allocate_node(_Args&&... __args);
1841 
1842  // Destroy the element within a node and deallocate the node.
1843  void
1844  _M_deallocate_node(__node_ptr __n);
1845 
1846  // Deallocate a node.
1847  void
1848  _M_deallocate_node_ptr(__node_ptr __n);
1849 
1850  // Deallocate the linked list of nodes pointed to by __n.
1851  // The elements within the nodes are destroyed.
1852  void
1853  _M_deallocate_nodes(__node_ptr __n);
1854 
1855  __buckets_ptr
1856  _M_allocate_buckets(std::size_t __bkt_count);
1857 
1858  void
1859  _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
1860  };
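  // A minimal sketch (not part of this header; all names are illustrative)
  // of the empty base optimisation that storing the allocator as a private
  // base enables: with a stateless allocator the base contributes no bytes
  // on the usual ABIs (e.g. the Itanium C++ ABI used by GCC), whereas a
  // data member would cost at least one byte plus padding.
  //
  //   struct stateless_alloc { };                 // empty allocator stand-in
  //
  //   template<typename Alloc>
  //     struct ebo_owner : private Alloc          // empty base -> no extra bytes
  //     {
  //       void* buckets = nullptr;
  //     };
  //
  //   static_assert(sizeof(ebo_owner<stateless_alloc>) == sizeof(void*),
  //                 "an empty base adds no storage on common ABIs");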
1861 
1862  // Definitions of class template _Hashtable_alloc's out-of-line member
1863  // functions.
1864  template<typename _NodeAlloc>
1865  template<typename... _Args>
1866  auto
1867  _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
1868  -> __node_ptr
1869  {
1870  auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
1871  __node_ptr __n = std::__to_address(__nptr);
1872  __try
1873  {
1874  ::new ((void*)__n) __node_type;
1875  __node_alloc_traits::construct(_M_node_allocator(),
1876  __n->_M_valptr(),
1877  std::forward<_Args>(__args)...);
1878  return __n;
1879  }
1880  __catch(...)
1881  {
1882  __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
1883  __throw_exception_again;
1884  }
1885  }
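  // _M_allocate_node above follows the usual allocator rollback idiom:
  // allocate raw storage, construct in place, and give the storage back if
  // construction throws.  A stand-alone sketch of the same idiom (not part
  // of this header; the helper name is made up), assuming an allocator
  // whose pointer type is a plain raw pointer:
  //
  //   #include <memory>
  //   #include <utility>
  //
  //   template<typename Alloc, typename... Args>
  //     typename std::allocator_traits<Alloc>::value_type*
  //     allocate_and_construct(Alloc& a, Args&&... args)
  //     {
  //       using Traits = std::allocator_traits<Alloc>;
  //       auto p = Traits::allocate(a, 1);        // may throw; nothing to undo
  //       try
  //         {
  //           Traits::construct(a, p, std::forward<Args>(args)...);
  //         }
  //       catch (...)
  //         {
  //           Traits::deallocate(a, p, 1);        // roll back the allocation
  //           throw;                              // re-throw to the caller
  //         }
  //       return p;
  //     }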
1886 
1887  template<typename _NodeAlloc>
1888  void
1889  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
1890  {
1891  __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
1892  _M_deallocate_node_ptr(__n);
1893  }
1894 
1895  template<typename _NodeAlloc>
1896  void
1897  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
1898  {
1899  typedef typename __node_alloc_traits::pointer _Ptr;
1900  auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
1901  __n->~__node_type();
1902  __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
1903  }
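  // Note on _M_deallocate_node_ptr above: the hashtable stores nodes as raw
  // __node_ptr values, but the allocator's deallocate expects the allocator's
  // (possibly fancy) pointer type, so pointer_traits::pointer_to converts the
  // raw pointer back to that type before the storage is returned.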
1904 
1905  template<typename _NodeAlloc>
1906  void
1907  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1908  {
1909  while (__n)
1910  {
1911  __node_ptr __tmp = __n;
1912  __n = __n->_M_next();
1913  _M_deallocate_node(__tmp);
1914  }
1915  }
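  // _M_deallocate_nodes above walks the singly linked node list, destroying
  // the element in each node and releasing its storage through
  // _M_deallocate_node before moving on to the next node.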
1916 
1917  template<typename _NodeAlloc>
1918  auto
1919  _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
1920  -> __buckets_ptr
1921  {
1922  __buckets_alloc_type __alloc(_M_node_allocator());
1923 
1924  auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
1925  __buckets_ptr __p = std::__to_address(__ptr);
1926  __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
1927  return __p;
1928  }
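  // Note on _M_allocate_buckets above: every bucket must start out as a null
  // __node_base_ptr, and a single __builtin_memset can zero the whole array
  // because a null pointer is represented as all-zero bits on the targets GCC
  // supports.  A portable alternative would be
  // std::fill_n(__p, __bkt_count, nullptr).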
1929 
1930  template<typename _NodeAlloc>
1931  void
1932  _Hashtable_alloc<_NodeAlloc>::
1933  _M_deallocate_buckets(__buckets_ptr __bkts,
1934  std::size_t __bkt_count)
1935  {
1936  typedef typename __buckets_alloc_traits::pointer _Ptr;
1937  auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
1938  __buckets_alloc_type __alloc(_M_node_allocator());
1939  __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
1940  }
1941 
1942  //@} hashtable-detail
1943 } // namespace __detail
1944 _GLIBCXX_END_NAMESPACE_VERSION
1945 } // namespace std
1946 
1947 #endif // _HASHTABLE_POLICY_H