  1. // hashtable.h header -*- C++ -*-
  2. // Copyright (C) 2007-2022 Free Software Foundation, Inc.
  3. //
  4. // This file is part of the GNU ISO C++ Library. This library is free
  5. // software; you can redistribute it and/or modify it under the
  6. // terms of the GNU General Public License as published by the
  7. // Free Software Foundation; either version 3, or (at your option)
  8. // any later version.
  9. // This library is distributed in the hope that it will be useful,
  10. // but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. // GNU General Public License for more details.
  13. // Under Section 7 of GPL version 3, you are granted additional
  14. // permissions described in the GCC Runtime Library Exception, version
  15. // 3.1, as published by the Free Software Foundation.
  16. // You should have received a copy of the GNU General Public License and
  17. // a copy of the GCC Runtime Library Exception along with this program;
  18. // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
  19. // <http://www.gnu.org/licenses/>.
  20. /** @file bits/hashtable.h
  21. * This is an internal header file, included by other library headers.
  22. * Do not attempt to use it directly. @headername{unordered_map, unordered_set}
  23. */
  24. #ifndef _HASHTABLE_H
  25. #define _HASHTABLE_H 1
  26. #pragma GCC system_header
  27. #include <bits/hashtable_policy.h>
  28. #include <bits/enable_special_members.h>
  29. #if __cplusplus > 201402L
  30. # include <bits/node_handle.h>
  31. #endif
  32. namespace std _GLIBCXX_VISIBILITY(default)
  33. {
  34. _GLIBCXX_BEGIN_NAMESPACE_VERSION
  35. /// @cond undocumented
  36. template<typename _Tp, typename _Hash>
  37. using __cache_default
  38. = __not_<__and_<// Do not cache for fast hasher.
  39. __is_fast_hash<_Hash>,
  40. // Mandatory to have erase not throwing.
  41. __is_nothrow_invocable<const _Hash&, const _Tp&>>>;
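// Editorial illustration (a sketch, not part of the original sources):
// caching defaults to "on" unless the hasher is both __is_fast_hash and
// nothrow-invocable. Assuming libstdc++'s usual specializations:
//
//   __cache_default<long, std::hash<long>>               // false_type:
//                                                        //  fast + noexcept
//   __cache_default<std::string, std::hash<std::string>> // true_type:
//                                                        //  not a fast hash
//
// so string keys cache their hash code by default while scalar keys do not.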
  42. // Helper to conditionally delete the default constructor.
  43. // The _Hash_node_base type is used to distinguish this specialization
  44. // from any other potentially-overlapping subobjects of the hashtable.
  45. template<typename _Equal, typename _Hash, typename _Allocator>
  46. using _Hashtable_enable_default_ctor
  47. = _Enable_default_constructor<__and_<is_default_constructible<_Equal>,
  48. is_default_constructible<_Hash>,
  49. is_default_constructible<_Allocator>>{},
  50. __detail::_Hash_node_base>;
  51. /**
  52. * Primary class template _Hashtable.
  53. *
  54. * @ingroup hashtable-detail
  55. *
  56. * @tparam _Value CopyConstructible type.
  57. *
  58. * @tparam _Key CopyConstructible type.
  59. *
  60. * @tparam _Alloc An allocator type
  61. * ([lib.allocator.requirements]) whose _Alloc::value_type is
  62. * _Value. As a conforming extension, we allow for
  63. * _Alloc::value_type != _Value.
  64. *
  65. * @tparam _ExtractKey Function object that takes an object of type
  66. * _Value and returns a value of type _Key.
  67. *
  68. * @tparam _Equal Function object that takes two objects of type _Key
  69. * and returns a bool-like value that is true if the two objects
  70. * are considered equal.
  71. *
  72. * @tparam _Hash The hash function. A unary function object with
  73. * argument type _Key and result type size_t. Return values should
  74. * be distributed over the entire range [0, numeric_limits<size_t>::max()].
  75. *
  76. * @tparam _RangeHash The range-hashing function (in the terminology of
  77. * Tavori and Dreizin). A binary function object whose argument
  78. * types and result type are all size_t. Given arguments r and N,
  79. * the return value is in the range [0, N).
  80. *
  81. * @tparam _Unused Not used.
  82. *
  83. * @tparam _RehashPolicy Policy class with three members, all of
  84. * which govern the bucket count. _M_next_bkt(n) returns a bucket
  85. * count no smaller than n. _M_bkt_for_elements(n) returns a
  86. * bucket count appropriate for an element count of n.
  87. * _M_need_rehash(n_bkt, n_elt, n_ins) determines whether, if the
  88. * current bucket count is n_bkt and the current element count is
  89. * n_elt, we need to increase the bucket count for n_ins insertions.
  90. * If so, returns make_pair(true, n), where n is the new bucket count. If
  91. * not, returns make_pair(false, <anything>)
  92. *
  93. * @tparam _Traits Compile-time class with three boolean
  94. * std::integral_constant members: __cache_hash_code, __constant_iterators,
  95. * __unique_keys.
  96. *
  97. * Each _Hashtable data structure has:
  98. *
  99. * - _Bucket[] _M_buckets
  100. * - _Hash_node_base _M_before_begin
  101. * - size_type _M_bucket_count
  102. * - size_type _M_element_count
  103. *
  104. * with _Bucket being _Hash_node_base* and _Hash_node containing:
  105. *
  106. * - _Hash_node* _M_next
  107. * - Tp _M_value
  108. * - size_t _M_hash_code if cache_hash_code is true
  109. *
  110. * In terms of Standard containers the hashtable is like the aggregation of:
  111. *
  112. * - std::forward_list<_Node> containing the elements
  113. * - std::vector<std::forward_list<_Node>::iterator> representing the buckets
  114. *
  115. * The non-empty buckets contain the node before the first node in the
  116. * bucket. This design makes it possible to implement something like a
  117. * std::forward_list::insert_after on container insertion and
  118. * std::forward_list::erase_after on container erase
  119. * calls. _M_before_begin is equivalent to
  120. * std::forward_list::before_begin. Empty buckets contain
  121. * nullptr. Note that one of the non-empty buckets contains
  122. * &_M_before_begin which is not a dereferenceable node so the
  123. * node pointer in a bucket shall never be dereferenced, only its
  124. * next node can be.
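 *
 * Illustrative sketch (editorial, not normative): with 16 buckets, no
 * cached hash codes, and the keys 1 and 18 stored (buckets 1 and 2
 * respectively), node(18) being the first node of the list:
 *
 * - _M_before_begin._M_nxt -> node(18) -> node(1) -> nullptr
 * - _M_buckets[2] == &_M_before_begin  (node before bucket 2's first node)
 * - _M_buckets[1] == node(18)          (node before bucket 1's first node)
 * - every other bucket == nullptr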
  125. *
  126. * Walking through a bucket's nodes requires a check on the hash code to
  127. * see if each node is still in the bucket. Such a design assumes a
  128. * fairly efficient hash functor and is one of the reasons it is
  129. * highly advisable to set __cache_hash_code to true.
  130. *
  131. * The container iterators are simply built from nodes. This way
  132. * incrementing the iterator is perfectly efficient independent of
  133. * how many empty buckets there are in the container.
  134. *
  135. * On insert we compute the element's hash code and use it to find the
  136. * bucket index. If the element must be inserted in an empty bucket
  137. * we add it at the beginning of the singly linked list and make the
  138. * bucket point to _M_before_begin. The bucket that used to point to
  139. * _M_before_begin, if any, is updated to point to its new before
  140. * begin node.
  141. *
  142. * On erase, the simple iterator design requires using the hash
  143. * functor to get the index of the bucket to update. For this
  144. * reason, when __cache_hash_code is set to false the hash functor must
  145. * not throw and this is enforced by a static assertion.
  146. *
  147. * Functionality is implemented by decomposition into base classes,
  148. * where the derived _Hashtable class is used in _Map_base,
  149. * _Insert, _Rehash_base, and _Equality base classes to access the
  150. * "this" pointer. _Hashtable_base is used in the base classes as a
  151. * non-recursive, fully-completed-type so that detailed nested type
  152. * information, such as iterator type and node type, can be
  153. * used. This is similar to the "Curiously Recurring Template
  154. * Pattern" (CRTP) technique, but uses a reconstructed, not
  155. * explicitly passed, template pattern.
  156. *
  157. * Base class templates are:
  158. * - __detail::_Hashtable_base
  159. * - __detail::_Map_base
  160. * - __detail::_Insert
  161. * - __detail::_Rehash_base
  162. * - __detail::_Equality
  163. */
  164. template<typename _Key, typename _Value, typename _Alloc,
  165. typename _ExtractKey, typename _Equal,
  166. typename _Hash, typename _RangeHash, typename _Unused,
  167. typename _RehashPolicy, typename _Traits>
  168. class _Hashtable
  169. : public __detail::_Hashtable_base<_Key, _Value, _ExtractKey, _Equal,
  170. _Hash, _RangeHash, _Unused, _Traits>,
  171. public __detail::_Map_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  172. _Hash, _RangeHash, _Unused,
  173. _RehashPolicy, _Traits>,
  174. public __detail::_Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  175. _Hash, _RangeHash, _Unused,
  176. _RehashPolicy, _Traits>,
  177. public __detail::_Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  178. _Hash, _RangeHash, _Unused,
  179. _RehashPolicy, _Traits>,
  180. public __detail::_Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  181. _Hash, _RangeHash, _Unused,
  182. _RehashPolicy, _Traits>,
  183. private __detail::_Hashtable_alloc<
  184. __alloc_rebind<_Alloc,
  185. __detail::_Hash_node<_Value,
  186. _Traits::__hash_cached::value>>>,
  187. private _Hashtable_enable_default_ctor<_Equal, _Hash, _Alloc>
  188. {
  189. static_assert(is_same<typename remove_cv<_Value>::type, _Value>::value,
  190. "unordered container must have a non-const, non-volatile value_type");
  191. #if __cplusplus > 201703L || defined __STRICT_ANSI__
  192. static_assert(is_same<typename _Alloc::value_type, _Value>{},
  193. "unordered container must have the same value_type as its allocator");
  194. #endif
  195. using __traits_type = _Traits;
  196. using __hash_cached = typename __traits_type::__hash_cached;
  197. using __constant_iterators = typename __traits_type::__constant_iterators;
  198. using __node_type = __detail::_Hash_node<_Value, __hash_cached::value>;
  199. using __node_alloc_type = __alloc_rebind<_Alloc, __node_type>;
  200. using __hashtable_alloc = __detail::_Hashtable_alloc<__node_alloc_type>;
  201. using __node_value_type =
  202. __detail::_Hash_node_value<_Value, __hash_cached::value>;
  203. using __node_ptr = typename __hashtable_alloc::__node_ptr;
  204. using __value_alloc_traits =
  205. typename __hashtable_alloc::__value_alloc_traits;
  206. using __node_alloc_traits =
  207. typename __hashtable_alloc::__node_alloc_traits;
  208. using __node_base = typename __hashtable_alloc::__node_base;
  209. using __node_base_ptr = typename __hashtable_alloc::__node_base_ptr;
  210. using __buckets_ptr = typename __hashtable_alloc::__buckets_ptr;
  211. using __insert_base = __detail::_Insert<_Key, _Value, _Alloc, _ExtractKey,
  212. _Equal, _Hash,
  213. _RangeHash, _Unused,
  214. _RehashPolicy, _Traits>;
  215. using __enable_default_ctor
  216. = _Hashtable_enable_default_ctor<_Equal, _Hash, _Alloc>;
  217. public:
  218. typedef _Key key_type;
  219. typedef _Value value_type;
  220. typedef _Alloc allocator_type;
  221. typedef _Equal key_equal;
  222. // mapped_type, if present, comes from _Map_base.
  223. // hasher, if present, comes from _Hash_code_base/_Hashtable_base.
  224. typedef typename __value_alloc_traits::pointer pointer;
  225. typedef typename __value_alloc_traits::const_pointer const_pointer;
  226. typedef value_type& reference;
  227. typedef const value_type& const_reference;
  228. using iterator = typename __insert_base::iterator;
  229. using const_iterator = typename __insert_base::const_iterator;
  230. using local_iterator = __detail::_Local_iterator<key_type, _Value,
  231. _ExtractKey, _Hash, _RangeHash, _Unused,
  232. __constant_iterators::value,
  233. __hash_cached::value>;
  234. using const_local_iterator = __detail::_Local_const_iterator<
  235. key_type, _Value,
  236. _ExtractKey, _Hash, _RangeHash, _Unused,
  237. __constant_iterators::value, __hash_cached::value>;
  238. private:
  239. using __rehash_type = _RehashPolicy;
  240. using __rehash_state = typename __rehash_type::_State;
  241. using __unique_keys = typename __traits_type::__unique_keys;
  242. using __hashtable_base = __detail::
  243. _Hashtable_base<_Key, _Value, _ExtractKey,
  244. _Equal, _Hash, _RangeHash, _Unused, _Traits>;
  245. using __hash_code_base = typename __hashtable_base::__hash_code_base;
  246. using __hash_code = typename __hashtable_base::__hash_code;
  247. using __ireturn_type = typename __insert_base::__ireturn_type;
  248. using __map_base = __detail::_Map_base<_Key, _Value, _Alloc, _ExtractKey,
  249. _Equal, _Hash, _RangeHash, _Unused,
  250. _RehashPolicy, _Traits>;
  251. using __rehash_base = __detail::_Rehash_base<_Key, _Value, _Alloc,
  252. _ExtractKey, _Equal,
  253. _Hash, _RangeHash, _Unused,
  254. _RehashPolicy, _Traits>;
  255. using __eq_base = __detail::_Equality<_Key, _Value, _Alloc, _ExtractKey,
  256. _Equal, _Hash, _RangeHash, _Unused,
  257. _RehashPolicy, _Traits>;
  258. using __reuse_or_alloc_node_gen_t =
  259. __detail::_ReuseOrAllocNode<__node_alloc_type>;
  260. using __alloc_node_gen_t =
  261. __detail::_AllocNode<__node_alloc_type>;
  262. using __node_builder_t =
  263. __detail::_NodeBuilder<_ExtractKey>;
  264. // Simple RAII type for managing a node containing an element
  265. struct _Scoped_node
  266. {
  267. // Take ownership of a node with a constructed element.
  268. _Scoped_node(__node_ptr __n, __hashtable_alloc* __h)
  269. : _M_h(__h), _M_node(__n) { }
  270. // Allocate a node and construct an element within it.
  271. template<typename... _Args>
  272. _Scoped_node(__hashtable_alloc* __h, _Args&&... __args)
  273. : _M_h(__h),
  274. _M_node(__h->_M_allocate_node(std::forward<_Args>(__args)...))
  275. { }
  276. // Destroy element and deallocate node.
  277. ~_Scoped_node() { if (_M_node) _M_h->_M_deallocate_node(_M_node); };
  278. _Scoped_node(const _Scoped_node&) = delete;
  279. _Scoped_node& operator=(const _Scoped_node&) = delete;
  280. __hashtable_alloc* _M_h;
  281. __node_ptr _M_node;
  282. };
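// Editorial sketch of how the insertion paths (defined later in this file)
// use _Scoped_node for exception safety: the node is allocated up front and
// ownership is only released once it has been linked into the bucket list,
// so any exception in between deallocates it automatically:
//
//   _Scoped_node __node{this, std::forward<_Args>(__args)...};
//   // ... compute hash code and bucket index, may throw ...
//   // ... link __node._M_node into the container ...
//   __node._M_node = nullptr;  // ownership transferred, skip deallocation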
  283. template<typename _Ht>
  284. static constexpr
  285. __conditional_t<std::is_lvalue_reference<_Ht>::value,
  286. const value_type&, value_type&&>
  287. __fwd_value_for(value_type& __val) noexcept
  288. { return std::move(__val); }
  289. // Compile-time diagnostics.
  290. // _Hash_code_base has everything protected, so use this derived type to
  291. // access it.
  292. struct __hash_code_base_access : __hash_code_base
  293. { using __hash_code_base::_M_bucket_index; };
  294. // To get bucket index we need _RangeHash not to throw.
  295. static_assert(is_nothrow_default_constructible<_RangeHash>::value,
  296. "Functor used to map hash code to bucket index"
  297. " must be nothrow default constructible");
  298. static_assert(noexcept(
  299. std::declval<const _RangeHash&>()((std::size_t)0, (std::size_t)0)),
  300. "Functor used to map hash code to bucket index must be"
  301. " noexcept");
  302. // To compute bucket index we also need _ExtractKey not to throw.
  303. static_assert(is_nothrow_default_constructible<_ExtractKey>::value,
  304. "_ExtractKey must be nothrow default constructible");
  305. static_assert(noexcept(
  306. std::declval<const _ExtractKey&>()(std::declval<_Value>())),
  307. "_ExtractKey functor must be noexcept invocable");
  308. template<typename _Keya, typename _Valuea, typename _Alloca,
  309. typename _ExtractKeya, typename _Equala,
  310. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  311. typename _RehashPolicya, typename _Traitsa,
  312. bool _Unique_keysa>
  313. friend struct __detail::_Map_base;
  314. template<typename _Keya, typename _Valuea, typename _Alloca,
  315. typename _ExtractKeya, typename _Equala,
  316. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  317. typename _RehashPolicya, typename _Traitsa>
  318. friend struct __detail::_Insert_base;
  319. template<typename _Keya, typename _Valuea, typename _Alloca,
  320. typename _ExtractKeya, typename _Equala,
  321. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  322. typename _RehashPolicya, typename _Traitsa,
  323. bool _Constant_iteratorsa>
  324. friend struct __detail::_Insert;
  325. template<typename _Keya, typename _Valuea, typename _Alloca,
  326. typename _ExtractKeya, typename _Equala,
  327. typename _Hasha, typename _RangeHasha, typename _Unuseda,
  328. typename _RehashPolicya, typename _Traitsa,
  329. bool _Unique_keysa>
  330. friend struct __detail::_Equality;
  331. public:
  332. using size_type = typename __hashtable_base::size_type;
  333. using difference_type = typename __hashtable_base::difference_type;
  334. #if __cplusplus > 201402L
  335. using node_type = _Node_handle<_Key, _Value, __node_alloc_type>;
  336. using insert_return_type = _Node_insert_return<iterator, node_type>;
  337. #endif
  338. private:
  339. __buckets_ptr _M_buckets = &_M_single_bucket;
  340. size_type _M_bucket_count = 1;
  341. __node_base _M_before_begin;
  342. size_type _M_element_count = 0;
  343. _RehashPolicy _M_rehash_policy;
  344. // A single bucket used when only one bucket is needed. This is especially
  345. // useful for move semantics: the moved-from hashtable can be left with a
  346. // single, non-allocated bucket, so those operations can be noexcept
  347. // qualified.
  348. // Note that we can't leave the hashtable with 0 buckets without adding
  349. // numerous checks in the code to avoid a 0 modulus.
  350. __node_base_ptr _M_single_bucket = nullptr;
  351. void
  352. _M_update_bbegin()
  353. {
  354. if (_M_begin())
  355. _M_buckets[_M_bucket_index(*_M_begin())] = &_M_before_begin;
  356. }
  357. void
  358. _M_update_bbegin(__node_ptr __n)
  359. {
  360. _M_before_begin._M_nxt = __n;
  361. _M_update_bbegin();
  362. }
  363. bool
  364. _M_uses_single_bucket(__buckets_ptr __bkts) const
  365. { return __builtin_expect(__bkts == &_M_single_bucket, false); }
  366. bool
  367. _M_uses_single_bucket() const
  368. { return _M_uses_single_bucket(_M_buckets); }
  369. static constexpr size_t
  370. __small_size_threshold() noexcept
  371. {
  372. return
  373. __detail::_Hashtable_hash_traits<_Hash>::__small_size_threshold();
  374. }
  375. __hashtable_alloc&
  376. _M_base_alloc() { return *this; }
  377. __buckets_ptr
  378. _M_allocate_buckets(size_type __bkt_count)
  379. {
  380. if (__builtin_expect(__bkt_count == 1, false))
  381. {
  382. _M_single_bucket = nullptr;
  383. return &_M_single_bucket;
  384. }
  385. return __hashtable_alloc::_M_allocate_buckets(__bkt_count);
  386. }
  387. void
  388. _M_deallocate_buckets(__buckets_ptr __bkts, size_type __bkt_count)
  389. {
  390. if (_M_uses_single_bucket(__bkts))
  391. return;
  392. __hashtable_alloc::_M_deallocate_buckets(__bkts, __bkt_count);
  393. }
  394. void
  395. _M_deallocate_buckets()
  396. { _M_deallocate_buckets(_M_buckets, _M_bucket_count); }
  397. // Gets bucket begin, deals with the fact that non-empty buckets contain
  398. // their before begin node.
  399. __node_ptr
  400. _M_bucket_begin(size_type __bkt) const;
  401. __node_ptr
  402. _M_begin() const
  403. { return static_cast<__node_ptr>(_M_before_begin._M_nxt); }
  404. // Assign *this using another _Hashtable instance. Whether elements
  405. // are copied or moved depends on the _Ht reference.
  406. template<typename _Ht>
  407. void
  408. _M_assign_elements(_Ht&&);
  409. template<typename _Ht, typename _NodeGenerator>
  410. void
  411. _M_assign(_Ht&&, const _NodeGenerator&);
  412. void
  413. _M_move_assign(_Hashtable&&, true_type);
  414. void
  415. _M_move_assign(_Hashtable&&, false_type);
  416. void
  417. _M_reset() noexcept;
  418. _Hashtable(const _Hash& __h, const _Equal& __eq,
  419. const allocator_type& __a)
  420. : __hashtable_base(__h, __eq),
  421. __hashtable_alloc(__node_alloc_type(__a)),
  422. __enable_default_ctor(_Enable_default_constructor_tag{})
  423. { }
  424. template<bool _No_realloc = true>
  425. static constexpr bool
  426. _S_nothrow_move()
  427. {
  428. #if __cplusplus <= 201402L
  429. return __and_<__bool_constant<_No_realloc>,
  430. is_nothrow_copy_constructible<_Hash>,
  431. is_nothrow_copy_constructible<_Equal>>::value;
  432. #else
  433. if constexpr (_No_realloc)
  434. if constexpr (is_nothrow_copy_constructible<_Hash>())
  435. return is_nothrow_copy_constructible<_Equal>();
  436. return false;
  437. #endif
  438. }
  439. _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
  440. true_type /* alloc always equal */)
  441. noexcept(_S_nothrow_move());
  442. _Hashtable(_Hashtable&&, __node_alloc_type&&,
  443. false_type /* alloc always equal */);
  444. template<typename _InputIterator>
  445. _Hashtable(_InputIterator __first, _InputIterator __last,
  446. size_type __bkt_count_hint,
  447. const _Hash&, const _Equal&, const allocator_type&,
  448. true_type __uks);
  449. template<typename _InputIterator>
  450. _Hashtable(_InputIterator __first, _InputIterator __last,
  451. size_type __bkt_count_hint,
  452. const _Hash&, const _Equal&, const allocator_type&,
  453. false_type __uks);
  454. public:
  455. // Constructor, destructor, assignment, swap
  456. _Hashtable() = default;
  457. _Hashtable(const _Hashtable&);
  458. _Hashtable(const _Hashtable&, const allocator_type&);
  459. explicit
  460. _Hashtable(size_type __bkt_count_hint,
  461. const _Hash& __hf = _Hash(),
  462. const key_equal& __eql = key_equal(),
  463. const allocator_type& __a = allocator_type());
  464. // Use delegating constructors.
  465. _Hashtable(_Hashtable&& __ht)
  466. noexcept(_S_nothrow_move())
  467. : _Hashtable(std::move(__ht), std::move(__ht._M_node_allocator()),
  468. true_type{})
  469. { }
  470. _Hashtable(_Hashtable&& __ht, const allocator_type& __a)
  471. noexcept(_S_nothrow_move<__node_alloc_traits::_S_always_equal()>())
  472. : _Hashtable(std::move(__ht), __node_alloc_type(__a),
  473. typename __node_alloc_traits::is_always_equal{})
  474. { }
  475. explicit
  476. _Hashtable(const allocator_type& __a)
  477. : __hashtable_alloc(__node_alloc_type(__a)),
  478. __enable_default_ctor(_Enable_default_constructor_tag{})
  479. { }
  480. template<typename _InputIterator>
  481. _Hashtable(_InputIterator __f, _InputIterator __l,
  482. size_type __bkt_count_hint = 0,
  483. const _Hash& __hf = _Hash(),
  484. const key_equal& __eql = key_equal(),
  485. const allocator_type& __a = allocator_type())
  486. : _Hashtable(__f, __l, __bkt_count_hint, __hf, __eql, __a,
  487. __unique_keys{})
  488. { }
  489. _Hashtable(initializer_list<value_type> __l,
  490. size_type __bkt_count_hint = 0,
  491. const _Hash& __hf = _Hash(),
  492. const key_equal& __eql = key_equal(),
  493. const allocator_type& __a = allocator_type())
  494. : _Hashtable(__l.begin(), __l.end(), __bkt_count_hint,
  495. __hf, __eql, __a, __unique_keys{})
  496. { }
  497. _Hashtable&
  498. operator=(const _Hashtable& __ht);
  499. _Hashtable&
  500. operator=(_Hashtable&& __ht)
  501. noexcept(__node_alloc_traits::_S_nothrow_move()
  502. && is_nothrow_move_assignable<_Hash>::value
  503. && is_nothrow_move_assignable<_Equal>::value)
  504. {
  505. constexpr bool __move_storage =
  506. __node_alloc_traits::_S_propagate_on_move_assign()
  507. || __node_alloc_traits::_S_always_equal();
  508. _M_move_assign(std::move(__ht), __bool_constant<__move_storage>());
  509. return *this;
  510. }
  511. _Hashtable&
  512. operator=(initializer_list<value_type> __l)
  513. {
  514. __reuse_or_alloc_node_gen_t __roan(_M_begin(), *this);
  515. _M_before_begin._M_nxt = nullptr;
  516. clear();
  517. // We consider that all elements of __l are going to be inserted.
  518. auto __l_bkt_count = _M_rehash_policy._M_bkt_for_elements(__l.size());
  519. // Do not shrink to keep potential user reservation.
  520. if (_M_bucket_count < __l_bkt_count)
  521. rehash(__l_bkt_count);
  522. this->_M_insert_range(__l.begin(), __l.end(), __roan, __unique_keys{});
  523. return *this;
  524. }
  525. ~_Hashtable() noexcept;
  526. void
  527. swap(_Hashtable&)
  528. noexcept(__and_<__is_nothrow_swappable<_Hash>,
  529. __is_nothrow_swappable<_Equal>>::value);
  530. // Basic container operations
  531. iterator
  532. begin() noexcept
  533. { return iterator(_M_begin()); }
  534. const_iterator
  535. begin() const noexcept
  536. { return const_iterator(_M_begin()); }
  537. iterator
  538. end() noexcept
  539. { return iterator(nullptr); }
  540. const_iterator
  541. end() const noexcept
  542. { return const_iterator(nullptr); }
  543. const_iterator
  544. cbegin() const noexcept
  545. { return const_iterator(_M_begin()); }
  546. const_iterator
  547. cend() const noexcept
  548. { return const_iterator(nullptr); }
  549. size_type
  550. size() const noexcept
  551. { return _M_element_count; }
  552. _GLIBCXX_NODISCARD bool
  553. empty() const noexcept
  554. { return size() == 0; }
  555. allocator_type
  556. get_allocator() const noexcept
  557. { return allocator_type(this->_M_node_allocator()); }
  558. size_type
  559. max_size() const noexcept
  560. { return __node_alloc_traits::max_size(this->_M_node_allocator()); }
  561. // Observers
  562. key_equal
  563. key_eq() const
  564. { return this->_M_eq(); }
  565. // hash_function, if present, comes from _Hash_code_base.
  566. // Bucket operations
  567. size_type
  568. bucket_count() const noexcept
  569. { return _M_bucket_count; }
  570. size_type
  571. max_bucket_count() const noexcept
  572. { return max_size(); }
  573. size_type
  574. bucket_size(size_type __bkt) const
  575. { return std::distance(begin(__bkt), end(__bkt)); }
  576. size_type
  577. bucket(const key_type& __k) const
  578. { return _M_bucket_index(this->_M_hash_code(__k)); }
  579. local_iterator
  580. begin(size_type __bkt)
  581. {
  582. return local_iterator(*this, _M_bucket_begin(__bkt),
  583. __bkt, _M_bucket_count);
  584. }
  585. local_iterator
  586. end(size_type __bkt)
  587. { return local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  588. const_local_iterator
  589. begin(size_type __bkt) const
  590. {
  591. return const_local_iterator(*this, _M_bucket_begin(__bkt),
  592. __bkt, _M_bucket_count);
  593. }
  594. const_local_iterator
  595. end(size_type __bkt) const
  596. { return const_local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  597. // DR 691.
  598. const_local_iterator
  599. cbegin(size_type __bkt) const
  600. {
  601. return const_local_iterator(*this, _M_bucket_begin(__bkt),
  602. __bkt, _M_bucket_count);
  603. }
  604. const_local_iterator
  605. cend(size_type __bkt) const
  606. { return const_local_iterator(*this, nullptr, __bkt, _M_bucket_count); }
  607. float
  608. load_factor() const noexcept
  609. {
  610. return static_cast<float>(size()) / static_cast<float>(bucket_count());
  611. }
  612. // max_load_factor, if present, comes from _Rehash_base.
  613. // Generalization of max_load_factor. Extension, not found in
  614. // TR1. Only useful if _RehashPolicy is something other than
  615. // the default.
  616. const _RehashPolicy&
  617. __rehash_policy() const
  618. { return _M_rehash_policy; }
  619. void
  620. __rehash_policy(const _RehashPolicy& __pol)
  621. { _M_rehash_policy = __pol; }
  622. // Lookup.
  623. iterator
  624. find(const key_type& __k);
  625. const_iterator
  626. find(const key_type& __k) const;
  627. size_type
  628. count(const key_type& __k) const;
  629. std::pair<iterator, iterator>
  630. equal_range(const key_type& __k);
  631. std::pair<const_iterator, const_iterator>
  632. equal_range(const key_type& __k) const;
  633. #if __cplusplus >= 202002L
  634. #define __cpp_lib_generic_unordered_lookup 201811L
  635. template<typename _Kt,
  636. typename = __has_is_transparent_t<_Hash, _Kt>,
  637. typename = __has_is_transparent_t<_Equal, _Kt>>
  638. iterator
  639. _M_find_tr(const _Kt& __k);
  640. template<typename _Kt,
  641. typename = __has_is_transparent_t<_Hash, _Kt>,
  642. typename = __has_is_transparent_t<_Equal, _Kt>>
  643. const_iterator
  644. _M_find_tr(const _Kt& __k) const;
  645. template<typename _Kt,
  646. typename = __has_is_transparent_t<_Hash, _Kt>,
  647. typename = __has_is_transparent_t<_Equal, _Kt>>
  648. size_type
  649. _M_count_tr(const _Kt& __k) const;
  650. template<typename _Kt,
  651. typename = __has_is_transparent_t<_Hash, _Kt>,
  652. typename = __has_is_transparent_t<_Equal, _Kt>>
  653. pair<iterator, iterator>
  654. _M_equal_range_tr(const _Kt& __k);
  655. template<typename _Kt,
  656. typename = __has_is_transparent_t<_Hash, _Kt>,
  657. typename = __has_is_transparent_t<_Equal, _Kt>>
  658. pair<const_iterator, const_iterator>
  659. _M_equal_range_tr(const _Kt& __k) const;
  660. #endif // C++20
  661. private:
  662. // Bucket index computation helpers.
  663. size_type
  664. _M_bucket_index(const __node_value_type& __n) const noexcept
  665. { return __hash_code_base::_M_bucket_index(__n, _M_bucket_count); }
  666. size_type
  667. _M_bucket_index(__hash_code __c) const
  668. { return __hash_code_base::_M_bucket_index(__c, _M_bucket_count); }
  669. __node_base_ptr
  670. _M_find_before_node(const key_type&);
  671. // Find and insert helper functions and types
  672. // Find the node before the one matching the criteria.
  673. __node_base_ptr
  674. _M_find_before_node(size_type, const key_type&, __hash_code) const;
  675. template<typename _Kt>
  676. __node_base_ptr
  677. _M_find_before_node_tr(size_type, const _Kt&, __hash_code) const;
  678. __node_ptr
  679. _M_find_node(size_type __bkt, const key_type& __key,
  680. __hash_code __c) const
  681. {
  682. __node_base_ptr __before_n = _M_find_before_node(__bkt, __key, __c);
  683. if (__before_n)
  684. return static_cast<__node_ptr>(__before_n->_M_nxt);
  685. return nullptr;
  686. }
  687. template<typename _Kt>
  688. __node_ptr
  689. _M_find_node_tr(size_type __bkt, const _Kt& __key,
  690. __hash_code __c) const
  691. {
  692. auto __before_n = _M_find_before_node_tr(__bkt, __key, __c);
  693. if (__before_n)
  694. return static_cast<__node_ptr>(__before_n->_M_nxt);
  695. return nullptr;
  696. }
  697. // Insert a node at the beginning of a bucket.
  698. void
  699. _M_insert_bucket_begin(size_type, __node_ptr);
  700. // Remove the bucket's first node.
  701. void
  702. _M_remove_bucket_begin(size_type __bkt, __node_ptr __next_n,
  703. size_type __next_bkt);
  704. // Get the node before __n in the bucket __bkt
  705. __node_base_ptr
  706. _M_get_previous_node(size_type __bkt, __node_ptr __n);
  707. pair<const_iterator, __hash_code>
  708. _M_compute_hash_code(const_iterator __hint, const key_type& __k) const;
  709. // Insert node __n with hash code __code, in bucket __bkt if no
  710. // rehash (assumes no element with same key already present).
  711. // Takes ownership of __n if insertion succeeds, throws otherwise.
  712. iterator
  713. _M_insert_unique_node(size_type __bkt, __hash_code,
  714. __node_ptr __n, size_type __n_elt = 1);
  715. // Insert node __n with key __k and hash code __code.
  716. // Takes ownership of __n if insertion succeeds, throws otherwise.
  717. iterator
  718. _M_insert_multi_node(__node_ptr __hint,
  719. __hash_code __code, __node_ptr __n);
  720. template<typename... _Args>
  721. std::pair<iterator, bool>
  722. _M_emplace(true_type __uks, _Args&&... __args);
  723. template<typename... _Args>
  724. iterator
  725. _M_emplace(false_type __uks, _Args&&... __args)
  726. { return _M_emplace(cend(), __uks, std::forward<_Args>(__args)...); }
  727. // Emplace with hint, useless when keys are unique.
  728. template<typename... _Args>
  729. iterator
  730. _M_emplace(const_iterator, true_type __uks, _Args&&... __args)
  731. { return _M_emplace(__uks, std::forward<_Args>(__args)...).first; }
  732. template<typename... _Args>
  733. iterator
  734. _M_emplace(const_iterator, false_type __uks, _Args&&... __args);
  735. template<typename _Kt, typename _Arg, typename _NodeGenerator>
  736. std::pair<iterator, bool>
  737. _M_insert_unique(_Kt&&, _Arg&&, const _NodeGenerator&);
  738. template<typename _Kt>
  739. static __conditional_t<
  740. __and_<__is_nothrow_invocable<_Hash&, const key_type&>,
  741. __not_<__is_nothrow_invocable<_Hash&, _Kt>>>::value,
  742. key_type, _Kt&&>
  743. _S_forward_key(_Kt&& __k)
  744. { return std::forward<_Kt>(__k); }
  745. static const key_type&
  746. _S_forward_key(const key_type& __k)
  747. { return __k; }
  748. static key_type&&
  749. _S_forward_key(key_type&& __k)
  750. { return std::move(__k); }
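// Editorial note: the primary _S_forward_key overload materializes a
// key_type up front when hashing a key_type cannot throw but hashing _Kt
// directly might, so the hash code can be computed without risking an
// exception once a node has already been allocated. Hypothetical example:
// with key_type = std::string and _Kt = const char*, invoking
// std::hash<std::string> on a const char* needs a throwing conversion to
// std::string, so the std::string key is built first and then hashed.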
  751. template<typename _Arg, typename _NodeGenerator>
  752. std::pair<iterator, bool>
  753. _M_insert(_Arg&& __arg, const _NodeGenerator& __node_gen,
  754. true_type /* __uks */)
  755. {
  756. return _M_insert_unique(
  757. _S_forward_key(_ExtractKey{}(std::forward<_Arg>(__arg))),
  758. std::forward<_Arg>(__arg), __node_gen);
  759. }
  760. template<typename _Arg, typename _NodeGenerator>
  761. iterator
  762. _M_insert(_Arg&& __arg, const _NodeGenerator& __node_gen,
  763. false_type __uks)
  764. {
  765. return _M_insert(cend(), std::forward<_Arg>(__arg), __node_gen,
  766. __uks);
  767. }
  768. // Insert with hint, not used when keys are unique.
  769. template<typename _Arg, typename _NodeGenerator>
  770. iterator
  771. _M_insert(const_iterator, _Arg&& __arg,
  772. const _NodeGenerator& __node_gen, true_type __uks)
  773. {
  774. return
  775. _M_insert(std::forward<_Arg>(__arg), __node_gen, __uks).first;
  776. }
  777. // Insert with hint when keys are not unique.
  778. template<typename _Arg, typename _NodeGenerator>
  779. iterator
  780. _M_insert(const_iterator, _Arg&&,
  781. const _NodeGenerator&, false_type __uks);
  782. size_type
  783. _M_erase(true_type __uks, const key_type&);
  784. size_type
  785. _M_erase(false_type __uks, const key_type&);
  786. iterator
  787. _M_erase(size_type __bkt, __node_base_ptr __prev_n, __node_ptr __n);
  788. public:
  789. // Emplace
  790. template<typename... _Args>
  791. __ireturn_type
  792. emplace(_Args&&... __args)
  793. { return _M_emplace(__unique_keys{}, std::forward<_Args>(__args)...); }
  794. template<typename... _Args>
  795. iterator
  796. emplace_hint(const_iterator __hint, _Args&&... __args)
  797. {
  798. return _M_emplace(__hint, __unique_keys{},
  799. std::forward<_Args>(__args)...);
  800. }
  801. // Insert member functions via inheritance.
  802. // Erase
  803. iterator
  804. erase(const_iterator);
  805. // LWG 2059.
  806. iterator
  807. erase(iterator __it)
  808. { return erase(const_iterator(__it)); }
  809. size_type
  810. erase(const key_type& __k)
  811. { return _M_erase(__unique_keys{}, __k); }
  812. iterator
  813. erase(const_iterator, const_iterator);
  814. void
  815. clear() noexcept;
  816. // Set the number of buckets, keeping it appropriate for the container's
  817. // number of elements.
  818. void rehash(size_type __bkt_count);
  819. // DR 1189.
  820. // reserve, if present, comes from _Rehash_base.
  821. #if __cplusplus > 201402L
  822. /// Re-insert an extracted node into a container with unique keys.
  823. insert_return_type
  824. _M_reinsert_node(node_type&& __nh)
  825. {
  826. insert_return_type __ret;
  827. if (__nh.empty())
  828. __ret.position = end();
  829. else
  830. {
  831. __glibcxx_assert(get_allocator() == __nh.get_allocator());
  832. const key_type& __k = __nh._M_key();
  833. __hash_code __code = this->_M_hash_code(__k);
  834. size_type __bkt = _M_bucket_index(__code);
  835. if (__node_ptr __n = _M_find_node(__bkt, __k, __code))
  836. {
  837. __ret.node = std::move(__nh);
  838. __ret.position = iterator(__n);
  839. __ret.inserted = false;
  840. }
  841. else
  842. {
  843. __ret.position
  844. = _M_insert_unique_node(__bkt, __code, __nh._M_ptr);
  845. __nh._M_ptr = nullptr;
  846. __ret.inserted = true;
  847. }
  848. }
  849. return __ret;
  850. }
  851. /// Re-insert an extracted node into a container with equivalent keys.
  852. iterator
  853. _M_reinsert_node_multi(const_iterator __hint, node_type&& __nh)
  854. {
  855. if (__nh.empty())
  856. return end();
  857. __glibcxx_assert(get_allocator() == __nh.get_allocator());
  858. const key_type& __k = __nh._M_key();
  859. auto __code = this->_M_hash_code(__k);
  860. auto __ret
  861. = _M_insert_multi_node(__hint._M_cur, __code, __nh._M_ptr);
  862. __nh._M_ptr = nullptr;
  863. return __ret;
  864. }
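// Editorial note: these members back the C++17 node-handle interface of the
// unordered containers; with __m1 and __m2 as hypothetical unordered_map
// objects, roughly:
//
//   auto __nh = __m1.extract(__key);   // -> extract() further below
//   __m2.insert(std::move(__nh));      // -> _M_reinsert_node() above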
  865. private:
  866. node_type
  867. _M_extract_node(size_t __bkt, __node_base_ptr __prev_n)
  868. {
  869. __node_ptr __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
  870. if (__prev_n == _M_buckets[__bkt])
  871. _M_remove_bucket_begin(__bkt, __n->_M_next(),
  872. __n->_M_nxt ? _M_bucket_index(*__n->_M_next()) : 0);
  873. else if (__n->_M_nxt)
  874. {
  875. size_type __next_bkt = _M_bucket_index(*__n->_M_next());
  876. if (__next_bkt != __bkt)
  877. _M_buckets[__next_bkt] = __prev_n;
  878. }
  879. __prev_n->_M_nxt = __n->_M_nxt;
  880. __n->_M_nxt = nullptr;
  881. --_M_element_count;
  882. return { __n, this->_M_node_allocator() };
  883. }
  884. public:
  885. // Extract a node.
  886. node_type
  887. extract(const_iterator __pos)
  888. {
  889. size_t __bkt = _M_bucket_index(*__pos._M_cur);
  890. return _M_extract_node(__bkt,
  891. _M_get_previous_node(__bkt, __pos._M_cur));
  892. }
  893. /// Extract a node.
  894. node_type
  895. extract(const _Key& __k)
  896. {
  897. node_type __nh;
  898. __hash_code __code = this->_M_hash_code(__k);
  899. std::size_t __bkt = _M_bucket_index(__code);
  900. if (__node_base_ptr __prev_node = _M_find_before_node(__bkt, __k, __code))
  901. __nh = _M_extract_node(__bkt, __prev_node);
  902. return __nh;
  903. }
  904. /// Merge from a compatible container into one with unique keys.
  905. template<typename _Compatible_Hashtable>
  906. void
  907. _M_merge_unique(_Compatible_Hashtable& __src)
  908. {
  909. static_assert(is_same_v<typename _Compatible_Hashtable::node_type,
  910. node_type>, "Node types are compatible");
  911. __glibcxx_assert(get_allocator() == __src.get_allocator());
  912. auto __n_elt = __src.size();
  913. for (auto __i = __src.cbegin(), __end = __src.cend(); __i != __end;)
  914. {
  915. auto __pos = __i++;
  916. const key_type& __k = _ExtractKey{}(*__pos);
  917. __hash_code __code
  918. = this->_M_hash_code(__src.hash_function(), *__pos._M_cur);
  919. size_type __bkt = _M_bucket_index(__code);
  920. if (_M_find_node(__bkt, __k, __code) == nullptr)
  921. {
  922. auto __nh = __src.extract(__pos);
  923. _M_insert_unique_node(__bkt, __code, __nh._M_ptr, __n_elt);
  924. __nh._M_ptr = nullptr;
  925. __n_elt = 1;
  926. }
  927. else if (__n_elt != 1)
  928. --__n_elt;
  929. }
  930. }
  931. /// Merge from a compatible container into one with equivalent keys.
  932. template<typename _Compatible_Hashtable>
  933. void
  934. _M_merge_multi(_Compatible_Hashtable& __src)
  935. {
  936. static_assert(is_same_v<typename _Compatible_Hashtable::node_type,
  937. node_type>, "Node types are compatible");
  938. __glibcxx_assert(get_allocator() == __src.get_allocator());
  939. __node_ptr __hint = nullptr;
  940. this->reserve(size() + __src.size());
  941. for (auto __i = __src.cbegin(), __end = __src.cend(); __i != __end;)
  942. {
  943. auto __pos = __i++;
  944. __hash_code __code
  945. = this->_M_hash_code(__src.hash_function(), *__pos._M_cur);
  946. auto __nh = __src.extract(__pos);
  947. __hint = _M_insert_multi_node(__hint, __code, __nh._M_ptr)._M_cur;
  948. __nh._M_ptr = nullptr;
  949. }
  950. }
  951. #endif // C++17
  952. private:
  953. // Helper rehash method used when keys are unique.
  954. void _M_rehash_aux(size_type __bkt_count, true_type __uks);
  955. // Helper rehash method used when keys can be non-unique.
  956. void _M_rehash_aux(size_type __bkt_count, false_type __uks);
  957. // Unconditionally change size of bucket array to n, restore
  958. // hash policy state to __state on exception.
  959. void _M_rehash(size_type __bkt_count, const __rehash_state& __state);
  960. };
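// Editorial note: the public unordered containers are thin wrappers around
// this class template. For example, std::unordered_map<_Key, _Tp> uses an
// instantiation roughly of the form:
//
//   _Hashtable<_Key, std::pair<const _Key, _Tp>, /* rebound allocator */,
//              __detail::_Select1st, /* _Pred */, /* _Hash */,
//              __detail::_Mod_range_hashing, __detail::_Default_ranged_hash,
//              __detail::_Prime_rehash_policy, /* _Traits */>
//
// with _Traits choosing __cache_hash_code via __cache_default and setting
// __unique_keys to true_type.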
  961. // Definitions of class template _Hashtable's out-of-line member functions.
  962. template<typename _Key, typename _Value, typename _Alloc,
  963. typename _ExtractKey, typename _Equal,
  964. typename _Hash, typename _RangeHash, typename _Unused,
  965. typename _RehashPolicy, typename _Traits>
  966. auto
  967. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  968. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  969. _M_bucket_begin(size_type __bkt) const
  970. -> __node_ptr
  971. {
  972. __node_base_ptr __n = _M_buckets[__bkt];
  973. return __n ? static_cast<__node_ptr>(__n->_M_nxt) : nullptr;
  974. }
  975. template<typename _Key, typename _Value, typename _Alloc,
  976. typename _ExtractKey, typename _Equal,
  977. typename _Hash, typename _RangeHash, typename _Unused,
  978. typename _RehashPolicy, typename _Traits>
  979. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  980. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  981. _Hashtable(size_type __bkt_count_hint,
  982. const _Hash& __h, const _Equal& __eq, const allocator_type& __a)
  983. : _Hashtable(__h, __eq, __a)
  984. {
  985. auto __bkt_count = _M_rehash_policy._M_next_bkt(__bkt_count_hint);
  986. if (__bkt_count > _M_bucket_count)
  987. {
  988. _M_buckets = _M_allocate_buckets(__bkt_count);
  989. _M_bucket_count = __bkt_count;
  990. }
  991. }
  992. template<typename _Key, typename _Value, typename _Alloc,
  993. typename _ExtractKey, typename _Equal,
  994. typename _Hash, typename _RangeHash, typename _Unused,
  995. typename _RehashPolicy, typename _Traits>
  996. template<typename _InputIterator>
  997. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  998. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  999. _Hashtable(_InputIterator __f, _InputIterator __l,
  1000. size_type __bkt_count_hint,
  1001. const _Hash& __h, const _Equal& __eq,
  1002. const allocator_type& __a, true_type /* __uks */)
  1003. : _Hashtable(__bkt_count_hint, __h, __eq, __a)
  1004. {
  1005. for (; __f != __l; ++__f)
  1006. this->insert(*__f);
  1007. }
  1008. template<typename _Key, typename _Value, typename _Alloc,
  1009. typename _ExtractKey, typename _Equal,
  1010. typename _Hash, typename _RangeHash, typename _Unused,
  1011. typename _RehashPolicy, typename _Traits>
  1012. template<typename _InputIterator>
  1013. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1014. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1015. _Hashtable(_InputIterator __f, _InputIterator __l,
  1016. size_type __bkt_count_hint,
  1017. const _Hash& __h, const _Equal& __eq,
  1018. const allocator_type& __a, false_type /* __uks */)
  1019. : _Hashtable(__h, __eq, __a)
  1020. {
  1021. auto __nb_elems = __detail::__distance_fw(__f, __l);
  1022. auto __bkt_count =
  1023. _M_rehash_policy._M_next_bkt(
  1024. std::max(_M_rehash_policy._M_bkt_for_elements(__nb_elems),
  1025. __bkt_count_hint));
  1026. if (__bkt_count > _M_bucket_count)
  1027. {
  1028. _M_buckets = _M_allocate_buckets(__bkt_count);
  1029. _M_bucket_count = __bkt_count;
  1030. }
  1031. for (; __f != __l; ++__f)
  1032. this->insert(*__f);
  1033. }
  1034. template<typename _Key, typename _Value, typename _Alloc,
  1035. typename _ExtractKey, typename _Equal,
  1036. typename _Hash, typename _RangeHash, typename _Unused,
  1037. typename _RehashPolicy, typename _Traits>
  1038. auto
  1039. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1040. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1041. operator=(const _Hashtable& __ht)
  1042. -> _Hashtable&
  1043. {
  1044. if (&__ht == this)
  1045. return *this;
  1046. if (__node_alloc_traits::_S_propagate_on_copy_assign())
  1047. {
  1048. auto& __this_alloc = this->_M_node_allocator();
  1049. auto& __that_alloc = __ht._M_node_allocator();
  1050. if (!__node_alloc_traits::_S_always_equal()
  1051. && __this_alloc != __that_alloc)
  1052. {
  1053. // Replacement allocator cannot free existing storage.
  1054. this->_M_deallocate_nodes(_M_begin());
  1055. _M_before_begin._M_nxt = nullptr;
  1056. _M_deallocate_buckets();
  1057. _M_buckets = nullptr;
  1058. std::__alloc_on_copy(__this_alloc, __that_alloc);
  1059. __hashtable_base::operator=(__ht);
  1060. _M_bucket_count = __ht._M_bucket_count;
  1061. _M_element_count = __ht._M_element_count;
  1062. _M_rehash_policy = __ht._M_rehash_policy;
  1063. __alloc_node_gen_t __alloc_node_gen(*this);
  1064. __try
  1065. {
  1066. _M_assign(__ht, __alloc_node_gen);
  1067. }
  1068. __catch(...)
  1069. {
  1070. // _M_assign took care of deallocating all memory. Now we
  1071. // must make sure this instance remains in a usable state.
  1072. _M_reset();
  1073. __throw_exception_again;
  1074. }
  1075. return *this;
  1076. }
  1077. std::__alloc_on_copy(__this_alloc, __that_alloc);
  1078. }
  1079. // Reuse allocated buckets and nodes.
  1080. _M_assign_elements(__ht);
  1081. return *this;
  1082. }
  1083. template<typename _Key, typename _Value, typename _Alloc,
  1084. typename _ExtractKey, typename _Equal,
  1085. typename _Hash, typename _RangeHash, typename _Unused,
  1086. typename _RehashPolicy, typename _Traits>
  1087. template<typename _Ht>
  1088. void
  1089. _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
  1090. _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
  1091. _M_assign_elements(_Ht&& __ht)
  1092. {
  1093. __buckets_ptr __former_buckets = nullptr;
  1094. std::size_t __former_bucket_count = _M_bucket_count;
  1095. const __rehash_state& __former_state = _M_rehash_policy._M_state();
  1096. if (_M_bucket_count != __ht._M_bucket_count)
  1097. {
  1098. __former_buckets = _M_buckets;
  1099. _M_buckets = _M_allocate_buckets(__ht._M_bucket_count);
  1100. _M_bucket_count = __ht._M_bucket_count;
  1101. }
  1102. else
  1103. __builtin_memset(_M_buckets, 0,
  1104. _M_bucket_count * sizeof(__node_base_ptr));
  1105. __try
  1106. {
  1107. __hashtable_base::operator=(std::forward<_Ht>(__ht));
  1108. _M_element_count = __ht._M_element_count;
  1109. _M_rehash_policy = __ht._M_rehash_policy;
  1110. __reuse_or_alloc_node_gen_t __roan(_M_begin(), *this);
  1111. _M_before_begin._M_nxt = nullptr;
  1112. _M_assign(std::forward<_Ht>(__ht), __roan);
  1113. if (__former_buckets)
  1114. _M_deallocate_buckets(__former_buckets, __former_bucket_count);
  1115. }
  1116. __catch(...)
  1117. {
  1118. if (__former_buckets)
  1119. {
  1120. // Restore previous buckets.
  1121. _M_deallocate_buckets();
  1122. _M_rehash_policy._M_reset(__former_state);
  1123. _M_buckets = __former_buckets;
  1124. _M_bucket_count = __former_bucket_count;
  1125. }
  1126. __builtin_memset(_M_buckets, 0,
  1127. _M_bucket_count * sizeof(__node_base_ptr));
  1128. __throw_exception_again;
  1129. }
  1130. }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Ht, typename _NodeGenerator>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_assign(_Ht&& __ht, const _NodeGenerator& __node_gen)
    {
      __buckets_ptr __buckets = nullptr;
      if (!_M_buckets)
        _M_buckets = __buckets = _M_allocate_buckets(_M_bucket_count);

      __try
        {
          if (!__ht._M_before_begin._M_nxt)
            return;

          // First deal with the special first node pointed to by
          // _M_before_begin.
          __node_ptr __ht_n = __ht._M_begin();
          __node_ptr __this_n
            = __node_gen(__fwd_value_for<_Ht>(__ht_n->_M_v()));
          this->_M_copy_code(*__this_n, *__ht_n);
          _M_update_bbegin(__this_n);

          // Then deal with other nodes.
          __node_ptr __prev_n = __this_n;
          for (__ht_n = __ht_n->_M_next(); __ht_n; __ht_n = __ht_n->_M_next())
            {
              __this_n = __node_gen(__fwd_value_for<_Ht>(__ht_n->_M_v()));
              __prev_n->_M_nxt = __this_n;
              this->_M_copy_code(*__this_n, *__ht_n);
              size_type __bkt = _M_bucket_index(*__this_n);
              if (!_M_buckets[__bkt])
                _M_buckets[__bkt] = __prev_n;
              __prev_n = __this_n;
            }
        }
      __catch(...)
        {
          clear();
          if (__buckets)
            _M_deallocate_buckets();
          __throw_exception_again;
        }
    }
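
  // Return *this to the empty, default-constructed state: a single embedded
  // bucket, no nodes, and a default rehash-policy state.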
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_reset() noexcept
    {
      _M_rehash_policy._M_reset();
      _M_bucket_count = 1;
      _M_single_bucket = nullptr;
      _M_buckets = &_M_single_bucket;
      _M_before_begin._M_nxt = nullptr;
      _M_element_count = 0;
    }
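
  // Move assignment in the case where __ht's storage can be adopted
  // directly: take ownership of its buckets and nodes, move the allocator
  // along, and leave __ht in the reset (empty but usable) state.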
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_move_assign(_Hashtable&& __ht, true_type)
    {
      if (__builtin_expect(std::__addressof(__ht) == this, false))
        return;

      this->_M_deallocate_nodes(_M_begin());
      _M_deallocate_buckets();
      __hashtable_base::operator=(std::move(__ht));
      _M_rehash_policy = __ht._M_rehash_policy;
      if (!__ht._M_uses_single_bucket())
        _M_buckets = __ht._M_buckets;
      else
        {
          _M_buckets = &_M_single_bucket;
          _M_single_bucket = __ht._M_single_bucket;
        }

      _M_bucket_count = __ht._M_bucket_count;
      _M_before_begin._M_nxt = __ht._M_before_begin._M_nxt;
      _M_element_count = __ht._M_element_count;
      std::__alloc_on_move(this->_M_node_allocator(), __ht._M_node_allocator());

      // Fix bucket containing the _M_before_begin pointer that can't be moved.
      _M_update_bbegin();
      __ht._M_reset();
    }
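
  // Move assignment when __ht's storage can only be adopted if the two node
  // allocators compare equal; otherwise fall back to moving the elements one
  // by one into this container's storage and clearing __ht.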
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_move_assign(_Hashtable&& __ht, false_type)
    {
      if (__ht._M_node_allocator() == this->_M_node_allocator())
        _M_move_assign(std::move(__ht), true_type{});
      else
        {
          // Can't move memory; move the elements instead.
          _M_assign_elements(std::move(__ht));
          __ht.clear();
        }
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _Hashtable(const _Hashtable& __ht)
    : __hashtable_base(__ht),
      __map_base(__ht),
      __rehash_base(__ht),
      __hashtable_alloc(
        __node_alloc_traits::_S_select_on_copy(__ht._M_node_allocator())),
      __enable_default_ctor(__ht),
      _M_buckets(nullptr),
      _M_bucket_count(__ht._M_bucket_count),
      _M_element_count(__ht._M_element_count),
      _M_rehash_policy(__ht._M_rehash_policy)
    {
      __alloc_node_gen_t __alloc_node_gen(*this);
      _M_assign(__ht, __alloc_node_gen);
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
               true_type /* alloc always equal */)
    noexcept(_S_nothrow_move())
    : __hashtable_base(__ht),
      __map_base(__ht),
      __rehash_base(__ht),
      __hashtable_alloc(std::move(__a)),
      __enable_default_ctor(__ht),
      _M_buckets(__ht._M_buckets),
      _M_bucket_count(__ht._M_bucket_count),
      _M_before_begin(__ht._M_before_begin._M_nxt),
      _M_element_count(__ht._M_element_count),
      _M_rehash_policy(__ht._M_rehash_policy)
    {
      // Update buckets if __ht is using its single bucket.
      if (__ht._M_uses_single_bucket())
        {
          _M_buckets = &_M_single_bucket;
          _M_single_bucket = __ht._M_single_bucket;
        }

      // Fix bucket containing the _M_before_begin pointer that can't be moved.
      _M_update_bbegin();

      __ht._M_reset();
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _Hashtable(const _Hashtable& __ht, const allocator_type& __a)
    : __hashtable_base(__ht),
      __map_base(__ht),
      __rehash_base(__ht),
      __hashtable_alloc(__node_alloc_type(__a)),
      __enable_default_ctor(__ht),
      _M_buckets(),
      _M_bucket_count(__ht._M_bucket_count),
      _M_element_count(__ht._M_element_count),
      _M_rehash_policy(__ht._M_rehash_policy)
    {
      __alloc_node_gen_t __alloc_node_gen(*this);
      _M_assign(__ht, __alloc_node_gen);
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _Hashtable(_Hashtable&& __ht, __node_alloc_type&& __a,
               false_type /* alloc always equal */)
    : __hashtable_base(__ht),
      __map_base(__ht),
      __rehash_base(__ht),
      __hashtable_alloc(std::move(__a)),
      __enable_default_ctor(__ht),
      _M_buckets(nullptr),
      _M_bucket_count(__ht._M_bucket_count),
      _M_element_count(__ht._M_element_count),
      _M_rehash_policy(__ht._M_rehash_policy)
    {
      if (__ht._M_node_allocator() == this->_M_node_allocator())
        {
          if (__ht._M_uses_single_bucket())
            {
              _M_buckets = &_M_single_bucket;
              _M_single_bucket = __ht._M_single_bucket;
            }
          else
            _M_buckets = __ht._M_buckets;

          // Fix bucket containing the _M_before_begin pointer that can't be
          // moved.
          _M_update_bbegin(__ht._M_begin());

          __ht._M_reset();
        }
      else
        {
          __alloc_node_gen_t __alloc_gen(*this);

          using _Fwd_Ht = __conditional_t<
            __move_if_noexcept_cond<value_type>::value,
            const _Hashtable&, _Hashtable&&>;
          _M_assign(std::forward<_Fwd_Ht>(__ht), __alloc_gen);
          __ht.clear();
        }
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    ~_Hashtable() noexcept
    {
      // Getting a bucket index from a node shall not throw because it is used
      // in methods (erase, swap...) that shall not throw. Need a complete
      // type to check this, so do it in the destructor not at class scope.
      static_assert(noexcept(declval<const __hash_code_base_access&>()
                             ._M_bucket_index(declval<const __node_value_type&>(),
                                              (std::size_t)0)),
                    "Cache the hash code or qualify your functors involved"
                    " in hash code and bucket index computation with noexcept");

      clear();
      _M_deallocate_buckets();
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    swap(_Hashtable& __x)
    noexcept(__and_<__is_nothrow_swappable<_Hash>,
                    __is_nothrow_swappable<_Equal>>::value)
    {
      // The only base class with member variables is hash_code_base.
      // We define _Hash_code_base::_M_swap because different
      // specializations have different members.
      this->_M_swap(__x);

      std::__alloc_on_swap(this->_M_node_allocator(), __x._M_node_allocator());
      std::swap(_M_rehash_policy, __x._M_rehash_policy);

      // Deal properly with potentially moved instances.
      if (this->_M_uses_single_bucket())
        {
          if (!__x._M_uses_single_bucket())
            {
              _M_buckets = __x._M_buckets;
              __x._M_buckets = &__x._M_single_bucket;
            }
        }
      else if (__x._M_uses_single_bucket())
        {
          __x._M_buckets = _M_buckets;
          _M_buckets = &_M_single_bucket;
        }
      else
        std::swap(_M_buckets, __x._M_buckets);

      std::swap(_M_bucket_count, __x._M_bucket_count);
      std::swap(_M_before_begin._M_nxt, __x._M_before_begin._M_nxt);
      std::swap(_M_element_count, __x._M_element_count);
      std::swap(_M_single_bucket, __x._M_single_bucket);

      // Fix buckets containing the _M_before_begin pointers that can't be
      // swapped.
      _M_update_bbegin();
      __x._M_update_bbegin();
    }
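
  // Lookup. When the number of elements is at or below
  // __small_size_threshold() the whole element list is scanned linearly with
  // key comparisons, avoiding the hash computation; otherwise the key is
  // hashed and only the matching bucket is searched.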
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    find(const key_type& __k)
    -> iterator
    {
      if (size() <= __small_size_threshold())
        {
          for (auto __it = begin(); __it != end(); ++__it)
            if (this->_M_key_equals(__k, *__it._M_cur))
              return __it;
          return end();
        }

      __hash_code __code = this->_M_hash_code(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      return iterator(_M_find_node(__bkt, __k, __code));
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    find(const key_type& __k) const
    -> const_iterator
    {
      if (size() <= __small_size_threshold())
        {
          for (auto __it = begin(); __it != end(); ++__it)
            if (this->_M_key_equals(__k, *__it._M_cur))
              return __it;
          return end();
        }

      __hash_code __code = this->_M_hash_code(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      return const_iterator(_M_find_node(__bkt, __k, __code));
    }
#if __cplusplus > 201703L
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename, typename>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_find_tr(const _Kt& __k)
    -> iterator
    {
      __hash_code __code = this->_M_hash_code_tr(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      return iterator(_M_find_node_tr(__bkt, __k, __code));
    }

  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename, typename>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_find_tr(const _Kt& __k) const
    -> const_iterator
    {
      __hash_code __code = this->_M_hash_code_tr(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      return const_iterator(_M_find_node_tr(__bkt, __k, __code));
    }
#endif
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    count(const key_type& __k) const
    -> size_type
    {
      auto __it = find(__k);
      if (!__it._M_cur)
        return 0;

      if (__unique_keys::value)
        return 1;

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      size_type __result = 1;
      for (auto __ref = __it++;
           __it._M_cur && this->_M_node_equals(*__ref._M_cur, *__it._M_cur);
           ++__it)
        ++__result;

      return __result;
    }
#if __cplusplus > 201703L
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename, typename>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_count_tr(const _Kt& __k) const
    -> size_type
    {
      __hash_code __code = this->_M_hash_code_tr(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      auto __n = _M_find_node_tr(__bkt, __k, __code);
      if (!__n)
        return 0;

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      iterator __it(__n);
      size_type __result = 1;
      for (++__it;
           __it._M_cur && this->_M_equals_tr(__k, __code, *__it._M_cur);
           ++__it)
        ++__result;

      return __result;
    }
#endif
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    equal_range(const key_type& __k)
    -> pair<iterator, iterator>
    {
      auto __ite = find(__k);
      if (!__ite._M_cur)
        return { __ite, __ite };

      auto __beg = __ite++;
      if (__unique_keys::value)
        return { __beg, __ite };

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      while (__ite._M_cur && this->_M_node_equals(*__beg._M_cur, *__ite._M_cur))
        ++__ite;

      return { __beg, __ite };
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    equal_range(const key_type& __k) const
    -> pair<const_iterator, const_iterator>
    {
      auto __ite = find(__k);
      if (!__ite._M_cur)
        return { __ite, __ite };

      auto __beg = __ite++;
      if (__unique_keys::value)
        return { __beg, __ite };

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      while (__ite._M_cur && this->_M_node_equals(*__beg._M_cur, *__ite._M_cur))
        ++__ite;

      return { __beg, __ite };
    }
#if __cplusplus > 201703L
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename, typename>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_equal_range_tr(const _Kt& __k)
    -> pair<iterator, iterator>
    {
      __hash_code __code = this->_M_hash_code_tr(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      auto __n = _M_find_node_tr(__bkt, __k, __code);
      iterator __ite(__n);
      if (!__n)
        return { __ite, __ite };

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      auto __beg = __ite++;
      while (__ite._M_cur && this->_M_equals_tr(__k, __code, *__ite._M_cur))
        ++__ite;

      return { __beg, __ite };
    }

  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename, typename>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_equal_range_tr(const _Kt& __k) const
    -> pair<const_iterator, const_iterator>
    {
      __hash_code __code = this->_M_hash_code_tr(__k);
      std::size_t __bkt = _M_bucket_index(__code);
      auto __n = _M_find_node_tr(__bkt, __k, __code);
      const_iterator __ite(__n);
      if (!__n)
        return { __ite, __ite };

      // All equivalent values are next to each other; once we find a
      // non-equivalent value after an equivalent one there cannot be any
      // further equivalent values.
      auto __beg = __ite++;
      while (__ite._M_cur && this->_M_equals_tr(__k, __code, *__ite._M_cur))
        ++__ite;

      return { __beg, __ite };
    }
#endif
  // Find the node before the one whose key compares equal to k.
  // Return nullptr if no node is found.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_find_before_node(const key_type& __k)
    -> __node_base_ptr
    {
      __node_base_ptr __prev_p = &_M_before_begin;
      if (!__prev_p->_M_nxt)
        return nullptr;

      for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);
           __p != nullptr;
           __p = __p->_M_next())
        {
          if (this->_M_key_equals(__k, *__p))
            return __prev_p;

          __prev_p = __p;
        }

      return nullptr;
    }
  // Find the node before the one whose key compares equal to k in the bucket
  // bkt. Return nullptr if no node is found.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_find_before_node(size_type __bkt, const key_type& __k,
                        __hash_code __code) const
    -> __node_base_ptr
    {
      __node_base_ptr __prev_p = _M_buckets[__bkt];
      if (!__prev_p)
        return nullptr;

      for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
           __p = __p->_M_next())
        {
          if (this->_M_equals(__k, __code, *__p))
            return __prev_p;

          if (!__p->_M_nxt || _M_bucket_index(*__p->_M_next()) != __bkt)
            break;
          __prev_p = __p;
        }

      return nullptr;
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_find_before_node_tr(size_type __bkt, const _Kt& __k,
                           __hash_code __code) const
    -> __node_base_ptr
    {
      __node_base_ptr __prev_p = _M_buckets[__bkt];
      if (!__prev_p)
        return nullptr;

      for (__node_ptr __p = static_cast<__node_ptr>(__prev_p->_M_nxt);;
           __p = __p->_M_next())
        {
          if (this->_M_equals_tr(__k, __code, *__p))
            return __prev_p;

          if (!__p->_M_nxt || _M_bucket_index(*__p->_M_next()) != __bkt)
            break;
          __prev_p = __p;
        }

      return nullptr;
    }
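
  // All nodes form a single forward list headed by _M_before_begin, and
  // _M_buckets[__bkt] points to the node *preceding* the first node of
  // bucket __bkt (possibly &_M_before_begin itself). For example:
  //
  //   _M_before_begin -> n1[bkt 2] -> n2[bkt 2] -> n3[bkt 0]
  //   _M_buckets[2] == &_M_before_begin, _M_buckets[0] == n2
  //
  // Keeping the predecessor rather than the first node lets insertion and
  // removal at the head of a bucket relink the list without any extra
  // search.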
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_insert_bucket_begin(size_type __bkt, __node_ptr __node)
    {
      if (_M_buckets[__bkt])
        {
          // The bucket is not empty: just insert the new node after the
          // bucket's before-begin node.
          __node->_M_nxt = _M_buckets[__bkt]->_M_nxt;
          _M_buckets[__bkt]->_M_nxt = __node;
        }
      else
        {
          // The bucket is empty: the new node is inserted at the beginning
          // of the singly-linked list and the bucket will point to
          // _M_before_begin.
          __node->_M_nxt = _M_before_begin._M_nxt;
          _M_before_begin._M_nxt = __node;

          if (__node->_M_nxt)
            // We must update the former begin bucket, which is pointing to
            // _M_before_begin.
            _M_buckets[_M_bucket_index(*__node->_M_next())] = __node;

          _M_buckets[__bkt] = &_M_before_begin;
        }
    }
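
  // Called when the first node of bucket __bkt is being removed; __next is
  // the node that will follow it and __next_bkt that node's bucket. If the
  // bucket becomes empty its entry is cleared and the neighbouring
  // bookkeeping (_M_before_begin or the next bucket's predecessor) is fixed
  // up.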
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_remove_bucket_begin(size_type __bkt, __node_ptr __next,
                           size_type __next_bkt)
    {
      if (!__next || __next_bkt != __bkt)
        {
          // Bucket is now empty.
          // First update next bucket if any.
          if (__next)
            _M_buckets[__next_bkt] = _M_buckets[__bkt];

          // Second update before begin node if necessary.
          if (&_M_before_begin == _M_buckets[__bkt])
            _M_before_begin._M_nxt = __next;
          _M_buckets[__bkt] = nullptr;
        }
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_get_previous_node(size_type __bkt, __node_ptr __n)
    -> __node_base_ptr
    {
      __node_base_ptr __prev_n = _M_buckets[__bkt];
      while (__prev_n->_M_nxt != __n)
        __prev_n = __prev_n->_M_nxt;
      return __prev_n;
    }
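
  // Emplace with unique keys. The node is built first because its value is
  // the only way to obtain the key; if an equivalent key is already present
  // the node is discarded by the _Scoped_node guard and no insertion
  // happens.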
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename... _Args>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_emplace(true_type /* __uks */, _Args&&... __args)
    -> pair<iterator, bool>
    {
      // First build the node to get access to the hash code.
      _Scoped_node __node { this, std::forward<_Args>(__args)... };
      const key_type& __k = _ExtractKey{}(__node._M_node->_M_v());
      if (size() <= __small_size_threshold())
        {
          for (auto __it = begin(); __it != end(); ++__it)
            if (this->_M_key_equals(__k, *__it._M_cur))
              // There is already an equivalent node, no insertion.
              return { __it, false };
        }

      __hash_code __code = this->_M_hash_code(__k);
      size_type __bkt = _M_bucket_index(__code);
      if (size() > __small_size_threshold())
        if (__node_ptr __p = _M_find_node(__bkt, __k, __code))
          // There is already an equivalent node, no insertion.
          return { iterator(__p), false };

      // Insert the node.
      auto __pos = _M_insert_unique_node(__bkt, __code, __node._M_node);
      __node._M_node = nullptr;
      return { __pos, true };
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename... _Args>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_emplace(const_iterator __hint, false_type /* __uks */,
               _Args&&... __args)
    -> iterator
    {
      // First build the node to get its hash code.
      _Scoped_node __node { this, std::forward<_Args>(__args)... };
      const key_type& __k = _ExtractKey{}(__node._M_node->_M_v());

      auto __res = this->_M_compute_hash_code(__hint, __k);
      auto __pos
        = _M_insert_multi_node(__res.first._M_cur, __res.second,
                               __node._M_node);
      __node._M_node = nullptr;
      return __pos;
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_compute_hash_code(const_iterator __hint, const key_type& __k) const
    -> pair<const_iterator, __hash_code>
    {
      if (size() <= __small_size_threshold())
        {
          if (__hint != cend())
            {
              for (auto __it = __hint; __it != cend(); ++__it)
                if (this->_M_key_equals(__k, *__it._M_cur))
                  return { __it, this->_M_hash_code(*__it._M_cur) };
            }

          for (auto __it = cbegin(); __it != __hint; ++__it)
            if (this->_M_key_equals(__k, *__it._M_cur))
              return { __it, this->_M_hash_code(*__it._M_cur) };
        }

      return { __hint, this->_M_hash_code(__k) };
    }
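
  // Insert __node, carrying hash code __code, into bucket __bkt assuming no
  // equivalent key is present. May rehash first, as directed by the rehash
  // policy (__n_elt is the number of elements expected to be inserted), in
  // which case the target bucket is recomputed.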
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_insert_unique_node(size_type __bkt, __hash_code __code,
                          __node_ptr __node, size_type __n_elt)
    -> iterator
    {
      const __rehash_state& __saved_state = _M_rehash_policy._M_state();
      std::pair<bool, std::size_t> __do_rehash
        = _M_rehash_policy._M_need_rehash(_M_bucket_count, _M_element_count,
                                          __n_elt);

      if (__do_rehash.first)
        {
          _M_rehash(__do_rehash.second, __saved_state);
          __bkt = _M_bucket_index(__code);
        }

      this->_M_store_code(*__node, __code);

      // Always insert at the beginning of the bucket.
      _M_insert_bucket_begin(__bkt, __node);
      ++_M_element_count;
      return iterator(__node);
    }
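
  // Insert __node unconditionally (equivalent keys allowed). The node is
  // linked immediately after an equivalent node when one exists, so that
  // equivalent keys stay adjacent; otherwise it goes at the beginning of its
  // bucket. __hint, when not null, is tried first as the insertion point.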
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_insert_multi_node(__node_ptr __hint,
                         __hash_code __code, __node_ptr __node)
    -> iterator
    {
      const __rehash_state& __saved_state = _M_rehash_policy._M_state();
      std::pair<bool, std::size_t> __do_rehash
        = _M_rehash_policy._M_need_rehash(_M_bucket_count, _M_element_count, 1);

      if (__do_rehash.first)
        _M_rehash(__do_rehash.second, __saved_state);

      this->_M_store_code(*__node, __code);
      const key_type& __k = _ExtractKey{}(__node->_M_v());
      size_type __bkt = _M_bucket_index(__code);

      // Find the node before an equivalent one, or use the hint if it exists
      // and is equivalent.
      __node_base_ptr __prev
        = __builtin_expect(__hint != nullptr, false)
          && this->_M_equals(__k, __code, *__hint)
          ? __hint
          : _M_find_before_node(__bkt, __k, __code);

      if (__prev)
        {
          // Insert after the node before the equivalent one.
          __node->_M_nxt = __prev->_M_nxt;
          __prev->_M_nxt = __node;
          if (__builtin_expect(__prev == __hint, false))
            // __hint might have been the last node of its bucket; in that
            // case the next bucket's entry must be updated.
            if (__node->_M_nxt
                && !this->_M_equals(__k, __code, *__node->_M_next()))
              {
                size_type __next_bkt = _M_bucket_index(*__node->_M_next());
                if (__next_bkt != __bkt)
                  _M_buckets[__next_bkt] = __node;
              }
        }
      else
        // The inserted node has no equivalent in the hashtable. We must
        // insert the new node at the beginning of the bucket to preserve
        // equivalent elements' relative positions.
        _M_insert_bucket_begin(__bkt, __node);

      ++_M_element_count;
      return iterator(__node);
    }
  // Insert v if no element with its key is already present.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Kt, typename _Arg, typename _NodeGenerator>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_insert_unique(_Kt&& __k, _Arg&& __v,
                     const _NodeGenerator& __node_gen)
    -> pair<iterator, bool>
    {
      if (size() <= __small_size_threshold())
        for (auto __it = begin(); __it != end(); ++__it)
          if (this->_M_key_equals_tr(__k, *__it._M_cur))
            return { __it, false };

      __hash_code __code = this->_M_hash_code_tr(__k);
      size_type __bkt = _M_bucket_index(__code);

      if (size() > __small_size_threshold())
        if (__node_ptr __node = _M_find_node_tr(__bkt, __k, __code))
          return { iterator(__node), false };

      _Scoped_node __node {
        __node_builder_t::_S_build(std::forward<_Kt>(__k),
                                   std::forward<_Arg>(__v),
                                   __node_gen),
        this
      };
      auto __pos
        = _M_insert_unique_node(__bkt, __code, __node._M_node);
      __node._M_node = nullptr;
      return { __pos, true };
    }
  // Insert v unconditionally.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    template<typename _Arg, typename _NodeGenerator>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_insert(const_iterator __hint, _Arg&& __v,
              const _NodeGenerator& __node_gen,
              false_type /* __uks */)
    -> iterator
    {
      // First allocate new node so that we don't do anything if it throws.
      _Scoped_node __node{ __node_gen(std::forward<_Arg>(__v)), this };

      // Second compute the hash code so that we don't rehash if it throws.
      auto __res = this->_M_compute_hash_code(
        __hint, _ExtractKey{}(__node._M_node->_M_v()));

      auto __pos
        = _M_insert_multi_node(__res.first._M_cur, __res.second,
                               __node._M_node);
      __node._M_node = nullptr;
      return __pos;
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    erase(const_iterator __it)
    -> iterator
    {
      __node_ptr __n = __it._M_cur;
      std::size_t __bkt = _M_bucket_index(*__n);

      // Look for the previous node to unlink the erased one from the list;
      // this is why buckets store the node before their first element: it
      // keeps this search within a single bucket and therefore fast.
      __node_base_ptr __prev_n = _M_get_previous_node(__bkt, __n);
      return _M_erase(__bkt, __prev_n, __n);
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_erase(size_type __bkt, __node_base_ptr __prev_n, __node_ptr __n)
    -> iterator
    {
      if (__prev_n == _M_buckets[__bkt])
        _M_remove_bucket_begin(__bkt, __n->_M_next(),
                               __n->_M_nxt ? _M_bucket_index(*__n->_M_next()) : 0);
      else if (__n->_M_nxt)
        {
          size_type __next_bkt = _M_bucket_index(*__n->_M_next());
          if (__next_bkt != __bkt)
            _M_buckets[__next_bkt] = __prev_n;
        }

      __prev_n->_M_nxt = __n->_M_nxt;
      iterator __result(__n->_M_next());
      this->_M_deallocate_node(__n);
      --_M_element_count;

      return __result;
    }
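
  // Erase by key, unique-keys variant: at most one node can match, so find
  // its predecessor (linearly for small sizes, via the hash otherwise) and
  // unlink it with the node-level _M_erase above.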
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_erase(true_type /* __uks */, const key_type& __k)
    -> size_type
    {
      __node_base_ptr __prev_n;
      __node_ptr __n;
      std::size_t __bkt;
      if (size() <= __small_size_threshold())
        {
          __prev_n = _M_find_before_node(__k);
          if (!__prev_n)
            return 0;

          // We found a matching node, erase it.
          __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
          __bkt = _M_bucket_index(*__n);
        }
      else
        {
          __hash_code __code = this->_M_hash_code(__k);
          __bkt = _M_bucket_index(__code);

          // Look for the node before the first matching node.
          __prev_n = _M_find_before_node(__bkt, __k, __code);
          if (!__prev_n)
            return 0;

          // We found a matching node, erase it.
          __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
        }

      _M_erase(__bkt, __prev_n, __n);
      return 1;
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_erase(false_type /* __uks */, const key_type& __k)
    -> size_type
    {
      std::size_t __bkt;
      __node_base_ptr __prev_n;
      __node_ptr __n;
      if (size() <= __small_size_threshold())
        {
          __prev_n = _M_find_before_node(__k);
          if (!__prev_n)
            return 0;

          // We found a matching node, erase it.
          __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
          __bkt = _M_bucket_index(*__n);
        }
      else
        {
          __hash_code __code = this->_M_hash_code(__k);
          __bkt = _M_bucket_index(__code);

          // Look for the node before the first matching node.
          __prev_n = _M_find_before_node(__bkt, __k, __code);
          if (!__prev_n)
            return 0;

          __n = static_cast<__node_ptr>(__prev_n->_M_nxt);
        }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 526. Is it undefined if a function in the standard changes
      // in parameters?
      // We use one loop to find all matching nodes and another to deallocate
      // them so that the key stays valid during the first loop. It might be
      // invalidated indirectly when destroying nodes.
      __node_ptr __n_last = __n->_M_next();
      while (__n_last && this->_M_node_equals(*__n, *__n_last))
        __n_last = __n_last->_M_next();

      std::size_t __n_last_bkt = __n_last ? _M_bucket_index(*__n_last) : __bkt;

      // Deallocate nodes.
      size_type __result = 0;
      do
        {
          __node_ptr __p = __n->_M_next();
          this->_M_deallocate_node(__n);
          __n = __p;
          ++__result;
        }
      while (__n != __n_last);

      _M_element_count -= __result;
      if (__prev_n == _M_buckets[__bkt])
        _M_remove_bucket_begin(__bkt, __n_last, __n_last_bkt);
      else if (__n_last_bkt != __bkt)
        _M_buckets[__n_last_bkt] = __prev_n;
      __prev_n->_M_nxt = __n_last;
      return __result;
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    auto
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    erase(const_iterator __first, const_iterator __last)
    -> iterator
    {
      __node_ptr __n = __first._M_cur;
      __node_ptr __last_n = __last._M_cur;
      if (__n == __last_n)
        return iterator(__n);

      std::size_t __bkt = _M_bucket_index(*__n);

      __node_base_ptr __prev_n = _M_get_previous_node(__bkt, __n);
      bool __is_bucket_begin = __n == _M_bucket_begin(__bkt);
      std::size_t __n_bkt = __bkt;
      for (;;)
        {
          do
            {
              __node_ptr __tmp = __n;
              __n = __n->_M_next();
              this->_M_deallocate_node(__tmp);
              --_M_element_count;
              if (!__n)
                break;
              __n_bkt = _M_bucket_index(*__n);
            }
          while (__n != __last_n && __n_bkt == __bkt);
          if (__is_bucket_begin)
            _M_remove_bucket_begin(__bkt, __n, __n_bkt);
          if (__n == __last_n)
            break;
          __is_bucket_begin = true;
          __bkt = __n_bkt;
        }

      if (__n && (__n_bkt != __bkt || __is_bucket_begin))
        _M_buckets[__n_bkt] = __prev_n;
      __prev_n->_M_nxt = __n;
      return iterator(__n);
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    clear() noexcept
    {
      this->_M_deallocate_nodes(_M_begin());
      __builtin_memset(_M_buckets, 0,
                       _M_bucket_count * sizeof(__node_base_ptr));
      _M_element_count = 0;
      _M_before_begin._M_nxt = nullptr;
    }
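
  // Set the bucket count to at least __bkt_count, but never below what the
  // rehash policy requires for _M_element_count + 1 elements; the final
  // count also goes through the policy's _M_next_bkt so it matches the
  // policy's progression. If no rehash happens, the policy state saved on
  // entry is restored.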
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    rehash(size_type __bkt_count)
    {
      const __rehash_state& __saved_state = _M_rehash_policy._M_state();
      __bkt_count
        = std::max(_M_rehash_policy._M_bkt_for_elements(_M_element_count + 1),
                   __bkt_count);
      __bkt_count = _M_rehash_policy._M_next_bkt(__bkt_count);

      if (__bkt_count != _M_bucket_count)
        _M_rehash(__bkt_count, __saved_state);
      else
        // No rehash, restore previous state to keep it consistent with
        // container state.
        _M_rehash_policy._M_reset(__saved_state);
    }
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_rehash(size_type __bkt_count, const __rehash_state& __state)
    {
      __try
        {
          _M_rehash_aux(__bkt_count, __unique_keys{});
        }
      __catch(...)
        {
          // A failure here means that bucket allocation failed. We only
          // have to restore the hash policy's previous state.
          _M_rehash_policy._M_reset(__state);
          __throw_exception_again;
        }
    }
  // Rehash when there are no equivalent elements.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_rehash_aux(size_type __bkt_count, true_type /* __uks */)
    {
      __buckets_ptr __new_buckets = _M_allocate_buckets(__bkt_count);
      __node_ptr __p = _M_begin();
      _M_before_begin._M_nxt = nullptr;
      std::size_t __bbegin_bkt = 0;
      while (__p)
        {
          __node_ptr __next = __p->_M_next();
          std::size_t __bkt
            = __hash_code_base::_M_bucket_index(*__p, __bkt_count);
          if (!__new_buckets[__bkt])
            {
              __p->_M_nxt = _M_before_begin._M_nxt;
              _M_before_begin._M_nxt = __p;
              __new_buckets[__bkt] = &_M_before_begin;
              if (__p->_M_nxt)
                __new_buckets[__bbegin_bkt] = __p;
              __bbegin_bkt = __bkt;
            }
          else
            {
              __p->_M_nxt = __new_buckets[__bkt]->_M_nxt;
              __new_buckets[__bkt]->_M_nxt = __p;
            }

          __p = __next;
        }

      _M_deallocate_buckets();
      _M_bucket_count = __bkt_count;
      _M_buckets = __new_buckets;
    }
  // Rehash when there can be equivalent elements, preserving their relative
  // order.
  template<typename _Key, typename _Value, typename _Alloc,
           typename _ExtractKey, typename _Equal,
           typename _Hash, typename _RangeHash, typename _Unused,
           typename _RehashPolicy, typename _Traits>
    void
    _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
               _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>::
    _M_rehash_aux(size_type __bkt_count, false_type /* __uks */)
    {
      __buckets_ptr __new_buckets = _M_allocate_buckets(__bkt_count);
      __node_ptr __p = _M_begin();
      _M_before_begin._M_nxt = nullptr;
      std::size_t __bbegin_bkt = 0;
      std::size_t __prev_bkt = 0;
      __node_ptr __prev_p = nullptr;
      bool __check_bucket = false;

      while (__p)
        {
          __node_ptr __next = __p->_M_next();
          std::size_t __bkt
            = __hash_code_base::_M_bucket_index(*__p, __bkt_count);

          if (__prev_p && __prev_bkt == __bkt)
            {
              // The previous insertion was into this same bucket: insert
              // after the previously inserted node to preserve the relative
              // order of equivalent elements.
              __p->_M_nxt = __prev_p->_M_nxt;
              __prev_p->_M_nxt = __p;

              // Inserting after a node within a bucket requires checking
              // that we haven't changed the bucket's last node; in that case
              // the next bucket, whose entry points to that node, must be
              // updated. We schedule the check for when we leave the
              // sequence of equivalent nodes, to limit the number of checks.
              __check_bucket = true;
            }
          else
            {
              if (__check_bucket)
                {
                  // Check whether we need to update the next bucket because
                  // of insertions into the __prev_bkt bucket.
                  if (__prev_p->_M_nxt)
                    {
                      std::size_t __next_bkt
                        = __hash_code_base::_M_bucket_index(
                            *__prev_p->_M_next(), __bkt_count);
                      if (__next_bkt != __prev_bkt)
                        __new_buckets[__next_bkt] = __prev_p;
                    }
                  __check_bucket = false;
                }

              if (!__new_buckets[__bkt])
                {
                  __p->_M_nxt = _M_before_begin._M_nxt;
                  _M_before_begin._M_nxt = __p;
                  __new_buckets[__bkt] = &_M_before_begin;
                  if (__p->_M_nxt)
                    __new_buckets[__bbegin_bkt] = __p;
                  __bbegin_bkt = __bkt;
                }
              else
                {
                  __p->_M_nxt = __new_buckets[__bkt]->_M_nxt;
                  __new_buckets[__bkt]->_M_nxt = __p;
                }
            }
          __prev_p = __p;
          __prev_bkt = __bkt;
          __p = __next;
        }

      if (__check_bucket && __prev_p->_M_nxt)
        {
          std::size_t __next_bkt
            = __hash_code_base::_M_bucket_index(*__prev_p->_M_next(),
                                                __bkt_count);
          if (__next_bkt != __prev_bkt)
            __new_buckets[__next_bkt] = __prev_p;
        }

      _M_deallocate_buckets();
      _M_bucket_count = __bkt_count;
      _M_buckets = __new_buckets;
    }
#if __cplusplus > 201402L
  template<typename, typename, typename> class _Hash_merge_helper { };
#endif // C++17

#if __cpp_deduction_guides >= 201606
  // Used to constrain deduction guides
  template<typename _Hash>
    using _RequireNotAllocatorOrIntegral
      = __enable_if_t<!__or_<is_integral<_Hash>, __is_allocator<_Hash>>::value>;
#endif

/// @endcond
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _HASHTABLE_H