hashtable_policy.h 63 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039
  1. // Internal policy header for unordered_set and unordered_map -*- C++ -*-
  2. // Copyright (C) 2010-2022 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/hashtable_policy.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly.
  23. * @headername{unordered_map,unordered_set}
  24. */
  25. #ifndef _HASHTABLE_POLICY_H
  26. #define _HASHTABLE_POLICY_H 1
  27. #include <tuple> // for std::tuple, std::forward_as_tuple
  28. #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
  29. #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
  30. namespace std _GLIBCXX_VISIBILITY(default)
  31. {
  32. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  33. /// @cond undocumented
// Forward declaration of the unordered-container implementation class
// template (defined in <bits/hashtable.h>); the policy classes below are
// its template arguments and CRTP bases.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  class _Hashtable;
  39. namespace __detail
  40. {
  41. /**
  42. * @defgroup hashtable-detail Base and Implementation Classes
  43. * @ingroup unordered_associative_containers
  44. * @{
  45. */
// Forward declaration; defined later in this header. Provides the hash
// code machinery shared by _Hashtable and the policy bases below.
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Equal, typename _Hash, typename _RangeHash,
	 typename _Unused, typename _Traits>
  struct _Hashtable_base;
  50. // Helper function: return distance(first, last) for forward
  51. // iterators, or 0/1 for input iterators.
  52. template<typename _Iterator>
  53. inline typename std::iterator_traits<_Iterator>::difference_type
  54. __distance_fw(_Iterator __first, _Iterator __last,
  55. std::input_iterator_tag)
  56. { return __first != __last ? 1 : 0; }
  57. template<typename _Iterator>
  58. inline typename std::iterator_traits<_Iterator>::difference_type
  59. __distance_fw(_Iterator __first, _Iterator __last,
  60. std::forward_iterator_tag)
  61. { return std::distance(__first, __last); }
  62. template<typename _Iterator>
  63. inline typename std::iterator_traits<_Iterator>::difference_type
  64. __distance_fw(_Iterator __first, _Iterator __last)
  65. { return __distance_fw(__first, __last,
  66. std::__iterator_category(__first)); }
// Key-extraction functor for set-like containers: the stored value is
// itself the key, so extraction is a perfect-forwarding identity.
struct _Identity
{
  template<typename _Tp>
    _Tp&&
    operator()(_Tp&& __x) const noexcept
    { return std::forward<_Tp>(__x); }
};
// Key-extraction functor for map-like containers: extracts the .first
// member of the stored std::pair, preserving const and reference
// qualifications of the argument.
struct _Select1st
{
  // Metafunction mapping a pair type (possibly const, possibly a
  // reference) to the type of its first member.
  template<typename _Pair>
    struct __1st_type;

  template<typename _Tp, typename _Up>
    struct __1st_type<pair<_Tp, _Up>>
    { using type = _Tp; };

  template<typename _Tp, typename _Up>
    struct __1st_type<const pair<_Tp, _Up>>
    { using type = const _Tp; };

  template<typename _Pair>
    struct __1st_type<_Pair&>
    { using type = typename __1st_type<_Pair>::type&; };

  template<typename _Tp>
    typename __1st_type<_Tp>::type&&
    operator()(_Tp&& __x) const noexcept
    { return std::forward<_Tp>(__x).first; }
};
// Builds a hashtable node from an already-extracted key plus the full
// insertion argument; specialized on the key-extraction policy.
template<typename _ExKey>
  struct _NodeBuilder;

// Map case: the key came from __arg.first, so the node generator is
// invoked with the key and the mapped part (__arg.second).
template<>
  struct _NodeBuilder<_Select1st>
  {
    template<typename _Kt, typename _Arg, typename _NodeGenerator>
      static auto
      _S_build(_Kt&& __k, _Arg&& __arg, const _NodeGenerator& __node_gen)
      -> typename _NodeGenerator::__node_type*
      {
	return __node_gen(std::forward<_Kt>(__k),
			  std::forward<_Arg>(__arg).second);
      }
  };

// Set case: the key is the whole value, so the second argument is
// intentionally unused.
template<>
  struct _NodeBuilder<_Identity>
  {
    template<typename _Kt, typename _Arg, typename _NodeGenerator>
      static auto
      _S_build(_Kt&& __k, _Arg&&, const _NodeGenerator& __node_gen)
      -> typename _NodeGenerator::__node_type*
      { return __node_gen(std::forward<_Kt>(__k)); }
  };
// Forward declaration; defined later in this header.
template<typename _NodeAlloc>
  struct _Hashtable_alloc;

// Functor recycling a pool of nodes and using allocation once the pool is
// empty.
template<typename _NodeAlloc>
  struct _ReuseOrAllocNode
  {
  private:
    using __node_alloc_type = _NodeAlloc;
    using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
    using __node_alloc_traits =
      typename __hashtable_alloc::__node_alloc_traits;

  public:
    using __node_type = typename __hashtable_alloc::__node_type;

    // __nodes is a singly-linked list of nodes available for reuse; any
    // node still unconsumed when *this is destroyed is deallocated.
    _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
    : _M_nodes(__nodes), _M_h(__h) { }
    _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;

    ~_ReuseOrAllocNode()
    { _M_h._M_deallocate_nodes(_M_nodes); }

    // Produce a node whose value is constructed from __args, popping a
    // node from the pool when possible, allocating otherwise.
    template<typename... _Args>
      __node_type*
      operator()(_Args&&... __args) const
      {
	if (_M_nodes)
	  {
	    __node_type* __node = _M_nodes;
	    _M_nodes = _M_nodes->_M_next();
	    __node->_M_nxt = nullptr;
	    auto& __a = _M_h._M_node_allocator();
	    // Destroy the recycled node's previous value before
	    // constructing the new one in the same storage.
	    __node_alloc_traits::destroy(__a, __node->_M_valptr());
	    __try
	      {
		__node_alloc_traits::construct(__a, __node->_M_valptr(),
					       std::forward<_Args>(__args)...);
	      }
	    __catch(...)
	      {
		// Construction failed and the old value is already gone:
		// release the node's memory, then rethrow.
		_M_h._M_deallocate_node_ptr(__node);
		__throw_exception_again;
	      }
	    return __node;
	  }
	return _M_h._M_allocate_node(std::forward<_Args>(__args)...);
      }

  private:
    // mutable: operator() is const but consumes the pool.
    mutable __node_type* _M_nodes;
    __hashtable_alloc& _M_h;
  };
// Functor similar to the previous one but without any pool of nodes to
// recycle: every call allocates a fresh node.
template<typename _NodeAlloc>
  struct _AllocNode
  {
  private:
    using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;

  public:
    using __node_type = typename __hashtable_alloc::__node_type;

    _AllocNode(__hashtable_alloc& __h)
    : _M_h(__h) { }

    // Allocate and construct a node whose value is built from __args.
    template<typename... _Args>
      __node_type*
      operator()(_Args&&... __args) const
      { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }

  private:
    __hashtable_alloc& _M_h;
  };
  181. // Auxiliary types used for all instantiations of _Hashtable nodes
  182. // and iterators.
  183. /**
  184. * struct _Hashtable_traits
  185. *
  186. * Important traits for hash tables.
  187. *
  188. * @tparam _Cache_hash_code Boolean value. True if the value of
  189. * the hash function is stored along with the value. This is a
  190. * time-space tradeoff. Storing it may improve lookup speed by
  191. * reducing the number of times we need to call the _Hash or _Equal
  192. * functors.
  193. *
  194. * @tparam _Constant_iterators Boolean value. True if iterator and
  195. * const_iterator are both constant iterator types. This is true
  196. * for unordered_set and unordered_multiset, false for
  197. * unordered_map and unordered_multimap.
  198. *
  199. * @tparam _Unique_keys Boolean value. True if the return value
  200. * of _Hashtable::count(k) is always at most one, false if it may
  201. * be an arbitrary number. This is true for unordered_set and
  202. * unordered_map, false for unordered_multiset and
  203. * unordered_multimap.
  204. */
// See the documentation comment above for the meaning of each flag.
template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
  struct _Hashtable_traits
  {
    using __hash_cached = __bool_constant<_Cache_hash_code>;
    using __constant_iterators = __bool_constant<_Constant_iterators>;
    using __unique_keys = __bool_constant<_Unique_keys>;
  };
/**
 * struct _Hashtable_hash_traits
 *
 * Important traits for hash tables depending on associated hasher.
 */
template<typename _Hash>
  struct _Hashtable_hash_traits
  {
    // Size threshold below which the container may prefer scanning
    // elements directly over computing hash codes: 0 (disabled) when the
    // hasher is cheap (__is_fast_hash), 20 otherwise.
    // NOTE(review): exact use of the threshold is in _Hashtable — confirm
    // there before relying on this description.
    static constexpr std::size_t
    __small_size_threshold() noexcept
    { return std::__is_fast_hash<_Hash>::value ? 0 : 20; }
  };
  225. /**
  226. * struct _Hash_node_base
  227. *
  228. * Nodes, used to wrap elements stored in the hash table. A policy
  229. * template parameter of class template _Hashtable controls whether
  230. * nodes also store a hash code. In some cases (e.g. strings) this
  231. * may be a performance win.
  232. */
  233. struct _Hash_node_base
  234. {
  235. _Hash_node_base* _M_nxt;
  236. _Hash_node_base() noexcept : _M_nxt() { }
  237. _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
  238. };
/**
 * struct _Hash_node_value_base
 *
 * Node type with the value to store. The element lives in aligned raw
 * storage so its construction and destruction can be driven explicitly
 * through the allocator.
 */
template<typename _Value>
  struct _Hash_node_value_base
  {
    typedef _Value value_type;

    __gnu_cxx::__aligned_buffer<_Value> _M_storage;

    // Pointer to the storage; valid even before the value is constructed.
    _Value*
    _M_valptr() noexcept
    { return _M_storage._M_ptr(); }

    const _Value*
    _M_valptr() const noexcept
    { return _M_storage._M_ptr(); }

    // Reference to the stored value; only meaningful once constructed.
    _Value&
    _M_v() noexcept
    { return *_M_valptr(); }

    const _Value&
    _M_v() const noexcept
    { return *_M_valptr(); }
  };
/**
 * Primary template struct _Hash_node_code_cache.
 * Empty when the node does not cache its hash code.
 */
template<bool _Cache_hash_code>
  struct _Hash_node_code_cache
  { };

/**
 * Specialization for node with cache, struct _Hash_node_code_cache.
 * Stores the element's hash code alongside it (the time-space tradeoff
 * described by _Hashtable_traits::_Cache_hash_code).
 */
template<>
  struct _Hash_node_code_cache<true>
  { std::size_t _M_hash_code; };

// Value storage combined with the (possibly empty) cached hash code.
template<typename _Value, bool _Cache_hash_code>
  struct _Hash_node_value
  : _Hash_node_value_base<_Value>
  , _Hash_node_code_cache<_Cache_hash_code>
  { };
/**
 * Primary template struct _Hash_node.
 * A complete hashtable node: list hook, value storage, and the optional
 * cached hash code.
 */
template<typename _Value, bool _Cache_hash_code>
  struct _Hash_node
  : _Hash_node_base
  , _Hash_node_value<_Value, _Cache_hash_code>
  {
    // Next node in the list, downcast to the full node type.
    _Hash_node*
    _M_next() const noexcept
    { return static_cast<_Hash_node*>(this->_M_nxt); }
  };
/// Base class for node iterators.
template<typename _Value, bool _Cache_hash_code>
  struct _Node_iterator_base
  {
    using __node_type = _Hash_node<_Value, _Cache_hash_code>;

    // Current node; null for the past-the-end iterator.
    __node_type* _M_cur;

    _Node_iterator_base() : _M_cur(nullptr) { }

    _Node_iterator_base(__node_type* __p) noexcept
    : _M_cur(__p) { }

    // Advance to the next node; must not be called on the past-the-end
    // iterator.
    void
    _M_incr() noexcept
    { _M_cur = _M_cur->_M_next(); }

    friend bool
    operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
    noexcept
    { return __x._M_cur == __y._M_cur; }

#if __cpp_impl_three_way_comparison < 201907L
    // In C++20 operator!= is synthesized from operator==.
    friend bool
    operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
    noexcept
    { return __x._M_cur != __y._M_cur; }
#endif
  };
/// Node iterators, used to iterate through all the hashtable.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Node_iterator
  : public _Node_iterator_base<_Value, __cache>
  {
  private:
    using __base_type = _Node_iterator_base<_Value, __cache>;
    using __node_type = typename __base_type::__node_type;

  public:
    using value_type = _Value;
    using difference_type = std::ptrdiff_t;
    using iterator_category = std::forward_iterator_tag;

    // When __constant_iterators (set-like containers) even the mutable
    // iterator exposes only const access to the element.
    using pointer = __conditional_t<__constant_iterators,
				    const value_type*, value_type*>;

    using reference = __conditional_t<__constant_iterators,
				      const value_type&, value_type&>;

    _Node_iterator() = default;

    explicit
    _Node_iterator(__node_type* __p) noexcept
    : __base_type(__p) { }

    reference
    operator*() const noexcept
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const noexcept
    { return this->_M_cur->_M_valptr(); }

    _Node_iterator&
    operator++() noexcept
    {
      this->_M_incr();
      return *this;
    }

    _Node_iterator
    operator++(int) noexcept
    {
      _Node_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
/// Node const_iterators, used to iterate through all the hashtable.
template<typename _Value, bool __constant_iterators, bool __cache>
  struct _Node_const_iterator
  : public _Node_iterator_base<_Value, __cache>
  {
  private:
    using __base_type = _Node_iterator_base<_Value, __cache>;
    using __node_type = typename __base_type::__node_type;

  public:
    typedef _Value value_type;
    typedef std::ptrdiff_t difference_type;
    typedef std::forward_iterator_tag iterator_category;
    typedef const value_type* pointer;
    typedef const value_type& reference;

    _Node_const_iterator() = default;

    explicit
    _Node_const_iterator(__node_type* __p) noexcept
    : __base_type(__p) { }

    // Implicit conversion from the mutable iterator.
    _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
			 __cache>& __x) noexcept
    : __base_type(__x._M_cur) { }

    reference
    operator*() const noexcept
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const noexcept
    { return this->_M_cur->_M_valptr(); }

    _Node_const_iterator&
    operator++() noexcept
    {
      this->_M_incr();
      return *this;
    }

    _Node_const_iterator
    operator++(int) noexcept
    {
      _Node_const_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
  395. // Many of class template _Hashtable's template parameters are policy
  396. // classes. These are defaults for the policies.
  397. /// Default range hashing function: use division to fold a large number
  398. /// into the range [0, N).
  399. struct _Mod_range_hashing
  400. {
  401. typedef std::size_t first_argument_type;
  402. typedef std::size_t second_argument_type;
  403. typedef std::size_t result_type;
  404. result_type
  405. operator()(first_argument_type __num,
  406. second_argument_type __den) const noexcept
  407. { return __num % __den; }
  408. };
/// Default ranged hash function H. In principle it should be a
/// function object composed from objects of type H1 and H2 such that
/// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
/// h1 and h2. So instead we'll just use a tag to tell class template
/// hashtable to do that composition itself.
struct _Default_ranged_hash { };
/// Default value for rehash policy. Bucket size is (usually) the
/// smallest prime that keeps the load factor small enough.
struct _Prime_rehash_policy
{
  using __has_load_factor = true_type;

  _Prime_rehash_policy(float __z = 1.0) noexcept
  : _M_max_load_factor(__z), _M_next_resize(0) { }

  float
  max_load_factor() const noexcept
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n.
  // Defined out of line (not in this header).
  std::size_t
  _M_next_bkt(std::size_t __n) const;

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const
  { return __builtin_ceil(__n / (double)_M_max_load_factor); }

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted. Do we need to
  // increase bucket count? If so, return make_pair(true, n), where n
  // is the new bucket count. If not, return make_pair(false, 0).
  // Defined out of line (not in this header).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		 std::size_t __n_ins) const;

  typedef std::size_t _State;

  // Opaque snapshot of the policy state, so callers can save and later
  // restore it via _M_reset(_State).
  _State
  _M_state() const
  { return _M_next_resize; }

  void
  _M_reset() noexcept
  { _M_next_resize = 0; }

  void
  _M_reset(_State __state)
  { _M_next_resize = __state; }

  static const std::size_t _S_growth_factor = 2;

  float _M_max_load_factor;

  // Threshold element count triggering the next rehash; mutable so the
  // const members declared above can update it from their out-of-line
  // definitions.
  mutable std::size_t _M_next_resize;
};
  453. /// Range hashing function assuming that second arg is a power of 2.
  454. struct _Mask_range_hashing
  455. {
  456. typedef std::size_t first_argument_type;
  457. typedef std::size_t second_argument_type;
  458. typedef std::size_t result_type;
  459. result_type
  460. operator()(first_argument_type __num,
  461. second_argument_type __den) const noexcept
  462. { return __num & (__den - 1); }
  463. };
/// Compute closest power of 2 not less than __n
inline std::size_t
__clp2(std::size_t __n) noexcept
{
  using __gnu_cxx::__int_traits;
  // Equivalent to return __n ? std::bit_ceil(__n) : 0;
  if (__n < 2)
    return __n;
  // Count leading zeros of (__n - 1), using the builtin matching the
  // width of size_t.
  const unsigned __lz = sizeof(size_t) > sizeof(long)
    ? __builtin_clzll(__n - 1ull)
    : __builtin_clzl(__n - 1ul);
  // Doing two shifts avoids undefined behaviour when __lz == 0.
  // If __n exceeds the largest representable power of 2 the result wraps
  // to 0; callers (e.g. _Power2_rehash_policy::_M_next_bkt) handle that.
  return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
}
/// Rehash policy providing power of 2 bucket numbers. Avoids modulo
/// operations.
struct _Power2_rehash_policy
{
  using __has_load_factor = true_type;

  _Power2_rehash_policy(float __z = 1.0) noexcept
  : _M_max_load_factor(__z), _M_next_resize(0) { }

  float
  max_load_factor() const noexcept
  { return _M_max_load_factor; }

  // Return a bucket size no smaller than n (as long as n is not above the
  // highest power of 2).
  std::size_t
  _M_next_bkt(std::size_t __n) noexcept
  {
    if (__n == 0)
      // Special case on container 1st initialization with 0 bucket count
      // hint. We keep _M_next_resize to 0 to make sure that next time we
      // want to add an element allocation will take place.
      return 1;

    const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
    const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
    std::size_t __res = __clp2(__n);

    if (__res == 0)
      // __clp2 wrapped to 0 (__n above the highest power of 2): saturate
      // at the largest supported bucket count.
      __res = __max_bkt;
    else if (__res == 1)
      // If __res is 1 we force it to 2 to make sure there will be an
      // allocation so that nothing need to be stored in the initial
      // single bucket
      __res = 2;

    if (__res == __max_bkt)
      // Set next resize to the max value so that we never try to rehash again
      // as we already reach the biggest possible bucket number.
      // Note that it might result in max_load_factor not being respected.
      _M_next_resize = size_t(-1);
    else
      _M_next_resize
	= __builtin_floor(__res * (double)_M_max_load_factor);

    return __res;
  }

  // Return a bucket count appropriate for n elements
  std::size_t
  _M_bkt_for_elements(std::size_t __n) const noexcept
  { return __builtin_ceil(__n / (double)_M_max_load_factor); }

  // __n_bkt is current bucket count, __n_elt is current element count,
  // and __n_ins is number of elements to be inserted. Do we need to
  // increase bucket count? If so, return make_pair(true, n), where n
  // is the new bucket count. If not, return make_pair(false, 0).
  std::pair<bool, std::size_t>
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		 std::size_t __n_ins) noexcept
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
	// If _M_next_resize is 0 it means that we have nothing allocated so
	// far and that we start inserting elements. In this case we start
	// with an initial bucket size of 11.
	double __min_bkts
	  = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
	  / (double)_M_max_load_factor;
	if (__min_bkts >= __n_bkt)
	  return { true,
	    _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
					      __n_bkt * _S_growth_factor)) };

	// Enough capacity: just refresh the cached threshold.
	_M_next_resize
	  = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
	return { false, 0 };
      }
    else
      return { false, 0 };
  }

  typedef std::size_t _State;

  // Opaque snapshot of the policy state, so callers can save and later
  // restore it via _M_reset(_State).
  _State
  _M_state() const noexcept
  { return _M_next_resize; }

  void
  _M_reset() noexcept
  { _M_next_resize = 0; }

  void
  _M_reset(_State __state) noexcept
  { _M_next_resize = __state; }

  static const std::size_t _S_growth_factor = 2;

  float _M_max_load_factor;
  // Threshold element count triggering the next rehash.
  std::size_t _M_next_resize;
};
  563. // Base classes for std::_Hashtable. We define these base classes
  564. // because in some cases we want to do different things depending on
  565. // the value of a policy class. In some cases the policy class
  566. // affects which member functions and nested typedefs are defined;
  567. // we handle that by specializing base class templates. Several of
  568. // the base class templates need to access other members of class
  569. // template _Hashtable, so we use a variant of the "Curiously
  570. // Recurring Template Pattern" (CRTP) technique.
  571. /**
  572. * Primary class template _Map_base.
  573. *
  574. * If the hashtable has a value type of the form pair<const T1, T2> and
  575. * a key extraction policy (_ExtractKey) that returns the first part
  576. * of the pair, the hashtable gets a mapped_type typedef. If it
  577. * satisfies those criteria and also has unique keys, then it also
  578. * gets an operator[].
  579. */
// Primary template: empty, so hashtables whose value type is not
// pair<const _Key, ...> with _Select1st extraction get no map API.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits,
	 bool _Unique_keys = _Traits::__unique_keys::value>
  struct _Map_base { };
/// Partial specialization, __unique_keys set to false, std::pair value type.
/// Multimaps get mapped_type but no operator[] / at().
template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
		   _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
  {
    using mapped_type = _Val;
  };
/// Partial specialization, __unique_keys set to true.
/// Unique-key maps additionally get operator[] and at().
template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
		   _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
  {
  private:
    using __hashtable_base = _Hashtable_base<_Key, pair<const _Key, _Val>,
					     _Select1st, _Equal, _Hash,
					     _RangeHash, _Unused,
					     _Traits>;

    using __hashtable = _Hashtable<_Key, pair<const _Key, _Val>, _Alloc,
				   _Select1st, _Equal, _Hash, _RangeHash,
				   _Unused, _RehashPolicy, _Traits>;

    using __hash_code = typename __hashtable_base::__hash_code;

  public:
    using key_type = typename __hashtable_base::key_type;
    using mapped_type = _Val;

    // Defined after the class; insert a value-initialized mapped_type
    // when the key is absent.
    mapped_type&
    operator[](const key_type& __k);

    mapped_type&
    operator[](key_type&& __k);

    // _GLIBCXX_RESOLVE_LIB_DEFECTS
    // DR 761. unordered_map needs an at() member function.
    mapped_type&
    at(const key_type& __k)
    {
      // CRTP: this base is only instantiated as a base of __hashtable,
      // so the downcast is safe.
      auto __ite = static_cast<__hashtable*>(this)->find(__k);
      if (!__ite._M_cur)
	__throw_out_of_range(__N("unordered_map::at"));
      return __ite->second;
    }

    const mapped_type&
    at(const key_type& __k) const
    {
      auto __ite = static_cast<const __hashtable*>(this)->find(__k);
      if (!__ite._M_cur)
	__throw_out_of_range(__N("unordered_map::at"));
      return __ite->second;
    }
  };
// operator[] for an lvalue key: find-or-insert, copying the key on
// insertion.
template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  auto
  _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
	    _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
  operator[](const key_type& __k)
  -> mapped_type&
  {
    __hashtable* __h = static_cast<__hashtable*>(this);
    __hash_code __code = __h->_M_hash_code(__k);
    std::size_t __bkt = __h->_M_bucket_index(__code);
    // Key already present: return its mapped value.
    if (auto __node = __h->_M_find_node(__bkt, __k, __code))
      return __node->_M_v().second;

    // Build a node holding {__k, mapped_type()} (piecewise construction
    // value-initializes the mapped part). _Scoped_node owns the node
    // until insertion succeeds, so a throw cannot leak it.
    typename __hashtable::_Scoped_node __node {
      __h,
      std::piecewise_construct,
      std::tuple<const key_type&>(__k),
      std::tuple<>()
    };
    auto __pos
      = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
    // Ownership transferred to the container; disarm the guard.
    __node._M_node = nullptr;
    return __pos->second;
  }
// operator[] for an rvalue key: same as the lvalue overload but the key
// is moved into the new element instead of copied.
template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  auto
  _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
	    _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
  operator[](key_type&& __k)
  -> mapped_type&
  {
    __hashtable* __h = static_cast<__hashtable*>(this);
    // Hash/lookup must happen before the move below consumes __k.
    __hash_code __code = __h->_M_hash_code(__k);
    std::size_t __bkt = __h->_M_bucket_index(__code);
    if (auto __node = __h->_M_find_node(__bkt, __k, __code))
      return __node->_M_v().second;

    // _Scoped_node reclaims the node if _M_insert_unique_node throws.
    typename __hashtable::_Scoped_node __node {
      __h,
      std::piecewise_construct,
      std::forward_as_tuple(std::move(__k)),	// move the key
      std::tuple<>()				// value-initialize the value
    };
    auto __pos
      = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
    __node._M_node = nullptr;	// ownership transferred to the table
    return __pos->second;
  }
// Partial specialization for unordered_map<const T, U>, see PR 104174.
// Strips the const from the key type so the generic specializations
// above (keyed on _Key, not const _Key) are reused unchanged.
template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits, bool __uniq>
  struct _Map_base<const _Key, pair<const _Key, _Val>,
		   _Alloc, _Select1st, _Equal, _Hash,
		   _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
  : _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal, _Hash,
	      _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
  { };
/**
 *  Primary class template _Insert_base.
 *
 *  Defines @c insert member functions appropriate to all _Hashtables.
 */
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Insert_base
  {
  protected:
    using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
					     _Equal, _Hash, _RangeHash,
					     _Unused, _Traits>;

    using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				   _Hash, _RangeHash,
				   _Unused, _RehashPolicy, _Traits>;
    using __hash_cached = typename _Traits::__hash_cached;
    using __constant_iterators = typename _Traits::__constant_iterators;
    using __hashtable_alloc = _Hashtable_alloc<
      __alloc_rebind<_Alloc, _Hash_node<_Value,
				       __hash_cached::value>>>;

    using value_type = typename __hashtable_base::value_type;
    using size_type = typename __hashtable_base::size_type;

    using __unique_keys = typename _Traits::__unique_keys;
    using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
    using __node_gen_type = _AllocNode<__node_alloc_type>;

    // Recover the derived _Hashtable; the only type inheriting this base,
    // so the downcast is safe.
    __hashtable&
    _M_conjure_hashtable()
    { return *(static_cast<__hashtable*>(this)); }

    // Range insertion, dispatched on __unique_keys (true_type: unique
    // keys, e.g. map/set; false_type: equivalent keys, e.g. multimap).
    template<typename _InputIterator, typename _NodeGetter>
      void
      _M_insert_range(_InputIterator __first, _InputIterator __last,
		      const _NodeGetter&, true_type __uks);

    template<typename _InputIterator, typename _NodeGetter>
      void
      _M_insert_range(_InputIterator __first, _InputIterator __last,
		      const _NodeGetter&, false_type __uks);

  public:
    using iterator = _Node_iterator<_Value, __constant_iterators::value,
				    __hash_cached::value>;

    using const_iterator = _Node_const_iterator<_Value,
						__constant_iterators::value,
						__hash_cached::value>;

    // insert() returns pair<iterator, bool> for unique keys, a plain
    // iterator for equivalent keys.
    using __ireturn_type = __conditional_t<__unique_keys::value,
					   std::pair<iterator, bool>,
					   iterator>;

    __ireturn_type
    insert(const value_type& __v)
    {
      __hashtable& __h = _M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__v, __node_gen, __unique_keys{});
    }

    iterator
    insert(const_iterator __hint, const value_type& __v)
    {
      __hashtable& __h = _M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
    }

    // try_emplace: the hint is ignored; the mapped value is only
    // constructed when the key is not already present.
    template<typename _KType, typename... _Args>
      std::pair<iterator, bool>
      try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
      {
	__hashtable& __h = _M_conjure_hashtable();
	auto __code = __h._M_hash_code(__k);
	std::size_t __bkt = __h._M_bucket_index(__code);
	if (auto __node = __h._M_find_node(__bkt, __k, __code))
	  return { iterator(__node), false };

	// _Scoped_node reclaims the node if insertion throws.
	typename __hashtable::_Scoped_node __node {
	  &__h,
	  std::piecewise_construct,
	  std::forward_as_tuple(std::forward<_KType>(__k)),
	  std::forward_as_tuple(std::forward<_Args>(__args)...)
	};
	auto __it
	  = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
	__node._M_node = nullptr;	// ownership transferred to the table
	return { __it, true };
      }

    void
    insert(initializer_list<value_type> __l)
    { this->insert(__l.begin(), __l.end()); }

    template<typename _InputIterator>
      void
      insert(_InputIterator __first, _InputIterator __last)
      {
	__hashtable& __h = _M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
      }
  };
// Range insertion for unique keys: insert one element at a time; each
// _M_insert call performs its own duplicate check and rehash decision.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  template<typename _InputIterator, typename _NodeGetter>
    void
    _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		 _Hash, _RangeHash, _Unused,
		 _RehashPolicy, _Traits>::
    _M_insert_range(_InputIterator __first, _InputIterator __last,
		    const _NodeGetter& __node_gen, true_type __uks)
    {
      __hashtable& __h = _M_conjure_hashtable();
      for (; __first != __last; ++__first)
	__h._M_insert(*__first, __node_gen, __uks);
    }
// Range insertion for equivalent keys: every element will be inserted,
// so the needed rehash can be computed and done once up front.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  template<typename _InputIterator, typename _NodeGetter>
    void
    _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		 _Hash, _RangeHash, _Unused,
		 _RehashPolicy, _Traits>::
    _M_insert_range(_InputIterator __first, _InputIterator __last,
		    const _NodeGetter& __node_gen, false_type __uks)
    {
      using __rehash_type = typename __hashtable::__rehash_type;
      using __rehash_state = typename __hashtable::__rehash_state;
      using pair_type = std::pair<bool, std::size_t>;

      // __distance_fw returns 0 for pure input iterators, in which case
      // no pre-rehash is attempted.
      size_type __n_elt = __detail::__distance_fw(__first, __last);
      if (__n_elt == 0)
	return;

      __hashtable& __h = _M_conjure_hashtable();
      __rehash_type& __rehash = __h._M_rehash_policy;
      const __rehash_state& __saved_state = __rehash._M_state();
      // Ask the policy whether adding __n_elt elements needs a rehash.
      pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
						      __h._M_element_count,
						      __n_elt);

      if (__do_rehash.first)
	__h._M_rehash(__do_rehash.second, __saved_state);

      for (; __first != __last; ++__first)
	__h._M_insert(*__first, __node_gen, __uks);
    }
/**
 *  Primary class template _Insert.
 *
 *  Defines @c insert member functions that depend on _Hashtable policies,
 *  via partial specializations.
 *
 *  Only declared here; the two partial specializations below cover
 *  _Constant_iterators == true and == false.
 */
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits,
	 bool _Constant_iterators = _Traits::__constant_iterators::value>
  struct _Insert;
/// Specialization for constant iterators (set/multiset): elements are
/// immutable, so only whole-value rvalue overloads are added.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		 _Hash, _RangeHash, _Unused,
		 _RehashPolicy, _Traits, true>
  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  {
    using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
				     _Equal, _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

    using value_type = typename __base_type::value_type;
    using iterator = typename __base_type::iterator;
    using const_iterator =  typename __base_type::const_iterator;
    using __ireturn_type = typename __base_type::__ireturn_type;

    using __unique_keys = typename __base_type::__unique_keys;
    using __hashtable = typename __base_type::__hashtable;
    using __node_gen_type = typename __base_type::__node_gen_type;

    // Bring in the lvalue/range/initializer_list overloads.
    using __base_type::insert;

    __ireturn_type
    insert(value_type&& __v)
    {
      __hashtable& __h = this->_M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
    }

    iterator
    insert(const_iterator __hint, value_type&& __v)
    {
      __hashtable& __h = this->_M_conjure_hashtable();
      __node_gen_type __node_gen(__h);
      return __h._M_insert(__hint, std::move(__v), __node_gen,
			   __unique_keys{});
    }
  };
/// Specialization for mutable iterators (map/multimap): adds perfect
/// forwarding insert overloads for anything convertible to value_type.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
  {
    using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
				     _Equal, _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;
    using value_type = typename __base_type::value_type;
    using iterator = typename __base_type::iterator;
    using const_iterator =  typename __base_type::const_iterator;

    using __unique_keys = typename __base_type::__unique_keys;
    using __hashtable = typename __base_type::__hashtable;
    using __ireturn_type = typename __base_type::__ireturn_type;

    using __base_type::insert;

    // SFINAE helpers: the _Pair overloads only participate when
    // value_type is constructible from _Pair&&.
    template<typename _Pair>
      using __is_cons = std::is_constructible<value_type, _Pair&&>;

    template<typename _Pair>
      using _IFcons = std::enable_if<__is_cons<_Pair>::value>;

    template<typename _Pair>
      using _IFconsp = typename _IFcons<_Pair>::type;

    template<typename _Pair, typename = _IFconsp<_Pair>>
      __ireturn_type
      insert(_Pair&& __v)
      {
	__hashtable& __h = this->_M_conjure_hashtable();
	return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
      }

    template<typename _Pair, typename = _IFconsp<_Pair>>
      iterator
      insert(const_iterator __hint, _Pair&& __v)
      {
	__hashtable& __h = this->_M_conjure_hashtable();
	return __h._M_emplace(__hint, __unique_keys{},
			      std::forward<_Pair>(__v));
      }
   };
// Detection alias: well-formed only if _Policy declares the nested type
// __has_load_factor; used with __detected_or_t to select _Rehash_base.
template<typename _Policy>
  using __has_load_factor = typename _Policy::__has_load_factor;
/**
 *  Primary class template _Rehash_base.
 *
 *  Give hashtable the max_load_factor functions and reserve iff the
 *  rehash policy supports it.
 *
 *  The defaulted last parameter is true_type or false_type depending on
 *  whether _RehashPolicy declares __has_load_factor.
 */
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits,
	 typename =
	   __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
  struct _Rehash_base;
/// Specialization when rehash policy doesn't provide load factor management.
/// Intentionally empty: no max_load_factor/reserve members are exposed.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
		      false_type /* Has load factor */>
  {
  };
/// Specialization when rehash policy provide load factor management.
/// All members forward to the derived _Hashtable's rehash policy.
template<typename _Key, typename _Value, typename _Alloc,
	 typename _ExtractKey, typename _Equal,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 typename _RehashPolicy, typename _Traits>
  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
		      true_type /* Has load factor */>
  {
  private:
    using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
				   _Equal, _Hash, _RangeHash, _Unused,
				   _RehashPolicy, _Traits>;

  public:
    float
    max_load_factor() const noexcept
    {
      // Safe downcast: _Hashtable is the only type deriving from this.
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      return __this->__rehash_policy().max_load_factor();
    }

    void
    max_load_factor(float __z)
    {
      // Replaces the whole policy object with one built from __z.
      __hashtable* __this = static_cast<__hashtable*>(this);
      __this->__rehash_policy(_RehashPolicy(__z));
    }

    // Prepare for at least __n elements without exceeding max_load_factor.
    void
    reserve(std::size_t __n)
    {
      __hashtable* __this = static_cast<__hashtable*>(this);
      __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
    }
  };
/**
 *  Primary class template _Hashtable_ebo_helper.
 *
 *  Helper class using EBO when it is not forbidden (the type is not
 *  final) and when it is worth it (the type is empty.)
 *
 *  _Nm disambiguates multiple bases of the same empty type.
 */
template<int _Nm, typename _Tp,
	 bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
  struct _Hashtable_ebo_helper;
/// Specialization using EBO: store the (empty) functor as a private base
/// so it occupies no storage.
template<int _Nm, typename _Tp>
  struct _Hashtable_ebo_helper<_Nm, _Tp, true>
  : private _Tp
  {
    _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }

    template<typename _OtherTp>
      _Hashtable_ebo_helper(_OtherTp&& __tp)
      : _Tp(std::forward<_OtherTp>(__tp))
      { }

    const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
    _Tp& _M_get() { return static_cast<_Tp&>(*this); }
  };
/// Specialization not using EBO: keep the functor as a regular
/// value-initialized data member.
template<int _Nm, typename _Tp>
  struct _Hashtable_ebo_helper<_Nm, _Tp, false>
  {
    _Hashtable_ebo_helper() = default;

    template<typename _OtherTp>
      _Hashtable_ebo_helper(_OtherTp&& __tp)
      : _M_tp(std::forward<_OtherTp>(__tp))
      { }

    const _Tp& _M_cget() const { return _M_tp; }
    _Tp& _M_get() { return _M_tp; }

  private:
    _Tp _M_tp{};
  };
/**
 *  Primary class template _Local_iterator_base.
 *
 *  Base class for local iterators, used to iterate within a bucket
 *  but not between buckets.
 *
 *  Specialized below on __cache_hash_code (whether nodes store their
 *  hash code).
 */
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 bool __cache_hash_code>
  struct _Local_iterator_base;
/**
 *  Primary class template _Hash_code_base.
 *
 *  Encapsulates two policy issues that aren't quite orthogonal.
 *    (1) the difference between using a ranged hash function and using
 *    the combination of a hash function and a range-hashing function.
 *    In the former case we don't have such things as hash codes, so
 *    we have a dummy type as placeholder.
 *    (2) Whether or not we cache hash codes.  Caching hash codes is
 *    meaningless if we have a ranged hash function.
 *
 *  We also put the key extraction objects here, for convenience.
 *  Each specialization derives from one or more of the template
 *  parameters to benefit from Ebo. This is important as this type
 *  is inherited in some cases by the _Local_iterator_base type used
 *  to implement local_iterator and const_local_iterator. As with
 *  any iterator type we prefer to make it as small as possible.
 */
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 bool __cache_hash_code>
  struct _Hash_code_base
  : private _Hashtable_ebo_helper<1, _Hash>
  {
  private:
    using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

    // Gives the local iterator implementation access to _M_bucket_index().
    friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
				       _Hash, _RangeHash, _Unused, false>;

  public:
    typedef _Hash					hasher;

    hasher
    hash_function() const
    { return _M_hash(); }

  protected:
    typedef std::size_t 				__hash_code;

    // We need the default constructor for the local iterators and _Hashtable
    // default constructor.
    _Hash_code_base() = default;

    _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

    // Hash a key with the stored hash functor.
    __hash_code
    _M_hash_code(const _Key& __k) const
    {
      static_assert(__is_invocable<const _Hash&, const _Key&>{},
	  "hash function must be invocable with an argument of key type");
      return _M_hash()(__k);
    }

    // Heterogeneous-lookup variant: hash a key-like type _Kt directly.
    template<typename _Kt>
      __hash_code
      _M_hash_code_tr(const _Kt& __k) const
      {
	static_assert(__is_invocable<const _Hash&, const _Kt&>{},
	  "hash function must be invocable with an argument of key type");
	return _M_hash()(__k);
      }

    // The node caches its code and it was computed with this same _Hash:
    // reuse the cached value.
    __hash_code
    _M_hash_code(const _Hash&,
		 const _Hash_node_value<_Value, true>& __n) const
    { return __n._M_hash_code; }

    // Compute hash code using _Hash as __n _M_hash_code, if present, was
    // computed using _H2.
    template<typename _H2>
      __hash_code
      _M_hash_code(const _H2&,
		   const _Hash_node_value<_Value, __cache_hash_code>& __n) const
      { return _M_hash_code(_ExtractKey{}(__n._M_v())); }

    // No cached code in the node: hash the extracted key.
    __hash_code
    _M_hash_code(const _Hash_node_value<_Value, false>& __n) const
    { return _M_hash_code(_ExtractKey{}(__n._M_v())); }

    // Cached code available: use it without rehashing.
    __hash_code
    _M_hash_code(const _Hash_node_value<_Value, true>& __n) const
    { return __n._M_hash_code; }

    // Map a hash code to a bucket with the range-hashing functor.
    std::size_t
    _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
    { return _RangeHash{}(__c, __bkt_count); }

    std::size_t
    _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
		    std::size_t __bkt_count) const
      noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
		&& noexcept(declval<const _RangeHash&>()((__hash_code)0,
							 (std::size_t)0)) )
    {
      return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
			  __bkt_count);
    }

    std::size_t
    _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
		    std::size_t __bkt_count) const
      noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
						      (std::size_t)0)) )
    { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

    // Store/copy are no-ops when the node does not cache its hash code.
    void
    _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
    { }

    void
    _M_copy_code(_Hash_node_code_cache<false>&,
		 const _Hash_node_code_cache<false>&) const
    { }

    void
    _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
    { __n._M_hash_code = __c; }

    void
    _M_copy_code(_Hash_node_code_cache<true>& __to,
		 const _Hash_node_code_cache<true>& __from) const
    { __to._M_hash_code = __from._M_hash_code; }

    void
    _M_swap(_Hash_code_base& __x)
    { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

    const _Hash&
    _M_hash() const { return __ebo_hash::_M_cget(); }
  };
/// Partial specialization used when nodes contain a cached hash code.
/// The bucket of the current node can be recomputed from the cached code,
/// so no _Hash_code_base copy needs to be stored in the iterator.
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused>
  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
			      _Hash, _RangeHash, _Unused, true>
  : public _Node_iterator_base<_Value, true>
  {
  protected:
    using __base_node_iter = _Node_iterator_base<_Value, true>;
    using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					     _Hash, _RangeHash, _Unused, true>;

    _Local_iterator_base() = default;
    _Local_iterator_base(const __hash_code_base&,
			 _Hash_node<_Value, true>* __p,
			 std::size_t __bkt, std::size_t __bkt_count)
    : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
    { }

    // Advance to the next node, becoming past-the-end when the next node
    // belongs to a different bucket.
    void
    _M_incr()
    {
      __base_node_iter::_M_incr();
      if (this->_M_cur)
	{
	  std::size_t __bkt
	    = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
	  if (__bkt != _M_bucket)
	    this->_M_cur = nullptr;
	}
    }

    std::size_t _M_bucket;
    std::size_t _M_bucket_count;

  public:
    std::size_t
    _M_get_bucket() const { return _M_bucket; }  // for debug mode
  };
// Uninitialized storage for a _Hash_code_base.
// This type is DefaultConstructible and Assignable even if the
// _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
// can be DefaultConstructible and Assignable.
template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
  struct _Hash_code_storage
  {
    // Raw, suitably aligned storage; the owner constructs/destroys the
    // _Tp object in it explicitly.
    __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

    _Tp*
    _M_h() { return _M_storage._M_ptr(); }

    const _Tp*
    _M_h() const { return _M_storage._M_ptr(); }
  };
// Empty partial specialization for empty _Hash_code_base types.
template<typename _Tp>
  struct _Hash_code_storage<_Tp, true>
  {
    static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

    // As _Tp is an empty type there will be no bytes written/read through
    // the cast pointer, so no strict-aliasing violation.
    _Tp*
    _M_h() { return reinterpret_cast<_Tp*>(this); }

    const _Tp*
    _M_h() const { return reinterpret_cast<const _Tp*>(this); }
  };
// Convenience alias: storage for the non-caching _Hash_code_base that a
// local iterator must carry when nodes do not cache their hash code.
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused>
  using __hash_code_for_local_iter
    = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					 _Hash, _RangeHash, _Unused, false>>;
  1208. // Partial specialization used when hash codes are not cached
  1209. template<typename _Key, typename _Value, typename _ExtractKey,
  1210. typename _Hash, typename _RangeHash, typename _Unused>
  1211. struct _Local_iterator_base<_Key, _Value, _ExtractKey,
  1212. _Hash, _RangeHash, _Unused, false>
  1213. : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
  1214. _Unused>
  1215. , _Node_iterator_base<_Value, false>
  1216. {
  1217. protected:
  1218. using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
  1219. _Hash, _RangeHash, _Unused, false>;
  1220. using __node_iter_base = _Node_iterator_base<_Value, false>;
  1221. _Local_iterator_base() : _M_bucket_count(-1) { }
  1222. _Local_iterator_base(const __hash_code_base& __base,
  1223. _Hash_node<_Value, false>* __p,
  1224. std::size_t __bkt, std::size_t __bkt_count)
  1225. : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
  1226. { _M_init(__base); }
  1227. ~_Local_iterator_base()
  1228. {
  1229. if (_M_bucket_count != size_t(-1))
  1230. _M_destroy();
  1231. }
  1232. _Local_iterator_base(const _Local_iterator_base& __iter)
  1233. : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
  1234. , _M_bucket_count(__iter._M_bucket_count)
  1235. {
  1236. if (_M_bucket_count != size_t(-1))
  1237. _M_init(*__iter._M_h());
  1238. }
  1239. _Local_iterator_base&
  1240. operator=(const _Local_iterator_base& __iter)
  1241. {
  1242. if (_M_bucket_count != -1)
  1243. _M_destroy();
  1244. this->_M_cur = __iter._M_cur;
  1245. _M_bucket = __iter._M_bucket;
  1246. _M_bucket_count = __iter._M_bucket_count;
  1247. if (_M_bucket_count != -1)
  1248. _M_init(*__iter._M_h());
  1249. return *this;
  1250. }
  1251. void
  1252. _M_incr()
  1253. {
  1254. __node_iter_base::_M_incr();
  1255. if (this->_M_cur)
  1256. {
  1257. std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
  1258. _M_bucket_count);
  1259. if (__bkt != _M_bucket)
  1260. this->_M_cur = nullptr;
  1261. }
  1262. }
  1263. std::size_t _M_bucket;
  1264. std::size_t _M_bucket_count;
  1265. void
  1266. _M_init(const __hash_code_base& __base)
  1267. { ::new(this->_M_h()) __hash_code_base(__base); }
  1268. void
  1269. _M_destroy() { this->_M_h()->~__hash_code_base(); }
  1270. public:
  1271. std::size_t
  1272. _M_get_bucket() const { return _M_bucket; } // for debug mode
  1273. };
/// local iterators
/// Forward iterator over the nodes of a single bucket; constness of
/// reference/pointer follows the __constant_iterators flag.
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 bool __constant_iterators, bool __cache>
  struct _Local_iterator
  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, __cache>
  {
  private:
    using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					     _Hash, _RangeHash, _Unused, __cache>;
    using __hash_code_base = typename __base_type::__hash_code_base;

  public:
    using value_type = _Value;
    using pointer = __conditional_t<__constant_iterators,
				    const value_type*, value_type*>;
    using reference = __conditional_t<__constant_iterators,
				      const value_type&, value_type&>;
    using difference_type = ptrdiff_t;
    using iterator_category = forward_iterator_tag;

    _Local_iterator() = default;

    _Local_iterator(const __hash_code_base& __base,
		    _Hash_node<_Value, __cache>* __n,
		    std::size_t __bkt, std::size_t __bkt_count)
    : __base_type(__base, __n, __bkt, __bkt_count)
    { }

    reference
    operator*() const
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const
    { return this->_M_cur->_M_valptr(); }

    _Local_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Local_iterator
    operator++(int)
    {
      _Local_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
/// local const_iterators
/// Always-const counterpart of _Local_iterator; implicitly convertible
/// from the mutable local iterator of the same configuration.
template<typename _Key, typename _Value, typename _ExtractKey,
	 typename _Hash, typename _RangeHash, typename _Unused,
	 bool __constant_iterators, bool __cache>
  struct _Local_const_iterator
  : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, __cache>
  {
  private:
    using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					     _Hash, _RangeHash, _Unused, __cache>;
    using __hash_code_base = typename __base_type::__hash_code_base;

  public:
    typedef _Value					value_type;
    typedef const value_type*				pointer;
    typedef const value_type&				reference;
    typedef std::ptrdiff_t				difference_type;
    typedef std::forward_iterator_tag			iterator_category;

    _Local_const_iterator() = default;

    _Local_const_iterator(const __hash_code_base& __base,
			  _Hash_node<_Value, __cache>* __n,
			  std::size_t __bkt, std::size_t __bkt_count)
    : __base_type(__base, __n, __bkt, __bkt_count)
    { }

    // Converting constructor from the mutable local iterator.
    _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
						_Hash, _RangeHash, _Unused,
						__constant_iterators,
						__cache>& __x)
    : __base_type(__x)
    { }

    reference
    operator*() const
    { return this->_M_cur->_M_v(); }

    pointer
    operator->() const
    { return this->_M_cur->_M_valptr(); }

    _Local_const_iterator&
    operator++()
    {
      this->_M_incr();
      return *this;
    }

    _Local_const_iterator
    operator++(int)
    {
      _Local_const_iterator __tmp(*this);
      this->_M_incr();
      return __tmp;
    }
  };
  /**
   *  Primary class template _Hashtable_base.
   *
   *  Helper class adding management of the _Equal functor to the
   *  _Hash_code_base type.
   *
   *  Base class templates are:
   *    - __detail::_Hash_code_base
   *    - __detail::_Hashtable_ebo_helper
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Equal, typename _Hash, typename _RangeHash,
	   typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
			     _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key			key_type;
      typedef _Value			value_type;
      typedef _Equal			key_equal;
      typedef std::size_t		size_type;
      typedef std::ptrdiff_t		difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused,
					       __hash_cached::value>;
      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      // The _Equal functor is held through this empty-base-optimization
      // helper so that an empty predicate adds no storage.
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

      // Overloads selected on whether the node caches its hash code.
      // Without a cached code there is nothing cheap to pre-filter on:
      // report a match and let the key predicate decide.
      static bool
      _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<false>&,
		     const _Hash_node_code_cache<false>&)
      { return true; }

      // With a cached code, compare hash codes first as a cheap filter
      // before invoking the (possibly expensive) _Equal predicate.
      static bool
      _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
      { return __c == __n._M_hash_code; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
		     const _Hash_node_code_cache<true>& __rhn)
      { return __lhn._M_hash_code == __rhn._M_hash_code; }

    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      // True iff __k compares equal to node __n's key via _Equal.
      bool
      _M_key_equals(const _Key& __k,
		    const _Hash_node_value<_Value,
					   __hash_cached::value>& __n) const
      {
	static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
	  "key equality predicate must be invocable with two arguments of "
	  "key type");
	return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      // Heterogeneous-lookup variant: __k may be any type the
      // (transparent) _Equal accepts alongside _Key.
      template<typename _Kt>
	bool
	_M_key_equals_tr(const _Kt& __k,
			 const _Hash_node_value<_Value,
						__hash_cached::value>& __n) const
	{
	  static_assert(
	    __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
	    "key equality predicate must be invocable with two arguments of "
	    "key type");
	  return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
	}

      // Full element comparison: hash-code pre-filter (a no-op when codes
      // are not cached) followed by the key equality predicate.
      bool
      _M_equals(const _Key& __k, __hash_code __c,
		const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      { return _S_equals(__c, __n) && _M_key_equals(__k, __n); }

      // Heterogeneous-lookup counterpart of _M_equals.
      template<typename _Kt>
	bool
	_M_equals_tr(const _Kt& __k, __hash_code __c,
		     const _Hash_node_value<_Value,
					    __hash_cached::value>& __n) const
	{ return _S_equals(__c, __n) && _M_key_equals_tr(__k, __n); }

      // Node-to-node comparison (left node's key against the right node),
      // again filtered by cached hash codes when available.
      bool
      _M_node_equals(
	const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
	const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
	return _S_node_equals(__lhn, __rhn)
	  && _M_key_equals(_ExtractKey{}(__lhn._M_v()), __rhn);
      }

      void
      _M_swap(_Hashtable_base& __x)
      {
	__hash_code_base::_M_swap(__x);
	std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };
  /**
   *  Primary class template _Equality.
   *
   *  This is for implementing equality comparison for unordered
   *  containers, per N3068, by John Lakos and Pablo Halpern.
   *  Algorithmically, we follow closely the reference implementations
   *  therein.
   */
  // Only the two specializations (on _Unique_keys) below are ever
  // instantiated; the primary template is left undefined.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Equality;
  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // True iff *this (downcast to the derived __hashtable) holds the
      // same elements as the argument.  Defined out of line.
      bool
      _M_equal(const __hashtable&) const;
    };
  // Unique-keys equality: every element of *this must have an element in
  // __other that compares equal with value_type's operator==.  Sizes being
  // equal, that is sufficient for container equality.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
	{
	  // Search only the bucket of __other that *__itx hashes to there
	  // (the two tables may have different bucket counts).
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __prev_n = __other._M_buckets[__ybkt];
	  if (!__prev_n)
	    return false;

	  // A bucket entry points at the node *before* the bucket's first
	  // node, so the scan starts at its successor.
	  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
	       __n = __n->_M_next())
	    {
	      if (__n->_M_v() == *__itx)
		break;

	      // Give up when the chain ends or the next node belongs to a
	      // different bucket: *__itx has no match in __other.
	      if (!__n->_M_nxt
		  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
		return false;
	    }
	}
      return true;
    }
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Multi-key equality: each run of equivalent keys in *this must be a
      // permutation of the corresponding run in the argument.  Defined out
      // of line.
      bool
      _M_equal(const __hashtable&) const;
    };
  // Multi-keys equality, per N3068: walk *this one equivalent-key run at a
  // time and check the matching run in __other is a permutation of it.
  // NOTE(review): this relies on equivalent elements being stored adjacent
  // within their bucket — an invariant _Hashtable maintains elsewhere.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // Measure the run [__itx, __itx_end) of keys equivalent to
	  // *__itx's key in *this.
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Locate the start of the corresponding run in the bucket of
	  // __other that *__itx hashes to there.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;

	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      // Not this key: skip past the rest of __y_n's equivalent
	      // run (_M_node_equals compares cached hash codes and keys)
	      // to the next candidate key in the chain.
	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      // Chain exhausted, or it crossed into another bucket: no
	      // matching run exists in __other.
	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  typename __hashtable::const_iterator __ity(__y_n);

	  // Cheap length check: make sure at least __x_count elements
	  // remain from __ity before the quadratic is_permutation pass.
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  // Continue with the next equivalent-key run.
	  __itx = __itx_end;
	}
      return true;
    }
  /**
   *  This type deals with all allocation and keeps an allocator instance
   *  through inheritance to benefit from EBO when possible.
   */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;

      // Metafunction extracting the element type from a _Hash_node
      // instantiation (the node allocator's value_type).
      template<typename>
	struct __get_value_type;
      template<typename _Val, bool _Cache_hash_code>
	struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
	{ using type = _Val; };

    public:
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      // Traits for the allocator rebound to the element type, used to
      // construct/destroy the stored values.
      using __value_alloc_traits = typename __node_alloc_traits::template
	rebind_traits<typename __get_value_type<__node_type>::type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      // Allocator rebound to allocate the bucket pointer array.
      using __buckets_alloc_type =
	__alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
	_Hashtable_alloc(_Alloc&& __a)
	: __ebo_node_alloc(std::forward<_Alloc>(__a))
	{ }

      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
	__node_ptr
	_M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node.
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Placement-new the node header first, then construct the value
	    // through the allocator so allocator-aware construction is
	    // honoured.
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    // Value construction failed: return the memory and rethrow.
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      // Destroy the stored value through the allocator, then release the
      // node itself via _M_deallocate_node_ptr.
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
  1685. template<typename _NodeAlloc>
  1686. void
  1687. _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
  1688. {
  1689. typedef typename __node_alloc_traits::pointer _Ptr;
  1690. auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
  1691. __n->~__node_type();
  1692. __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
  1693. }
  1694. template<typename _NodeAlloc>
  1695. void
  1696. _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
  1697. {
  1698. while (__n)
  1699. {
  1700. __node_ptr __tmp = __n;
  1701. __n = __n->_M_next();
  1702. _M_deallocate_node(__tmp);
  1703. }
  1704. }
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      // Rebind the node allocator to allocate the bucket pointer array.
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      // Every bucket starts out empty (null node pointer).
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
  1716. template<typename _NodeAlloc>
  1717. void
  1718. _Hashtable_alloc<_NodeAlloc>::
  1719. _M_deallocate_buckets(__buckets_ptr __bkts,
  1720. std::size_t __bkt_count)
  1721. {
  1722. typedef typename __buckets_alloc_traits::pointer _Ptr;
  1723. auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
  1724. __buckets_alloc_type __alloc(_M_node_allocator());
  1725. __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
  1726. }
  1727. ///@} hashtable-detail
  1728. } // namespace __detail
  1729. /// @endcond
  1730. _GLIBCXX_END_NAMESPACE_VERSION
  1731. } // namespace std
  1732. #endif // _HASHTABLE_POLICY_H