// shared_ptr atomic access -*- C++ -*-

// Copyright (C) 2014-2022 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/shared_ptr_atomic.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{memory}
 */
#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup pointer_abstractions
   * @{
   */
  /// @relates shared_ptr @{

  /// @cond undocumented

  // Scoped lock guard for the free-function atomic access below.
  // Locks one or two mutexes from a small internal pool, selected by
  // hashing the pointer value(s); the keys record which pool entries
  // the destructor must unlock.
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };

  /// @endcond
  /**
   *  @brief  Report whether shared_ptr atomic operations are lock-free.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return True if atomic access to @c *__p is lock-free, false otherwise.
   *  @{
   */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }

  /// @}
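
  // Usage sketch (illustrative only, not part of this header): querying
  // whether atomic access to a shared_ptr avoids locking.  With this
  // mutex-based implementation the answer is "yes" only when the program
  // is effectively single-threaded (__gthread_active_p() returns 0).
  //
  //   #include <memory>
  //   #include <atomic>
  //
  //   std::shared_ptr<int> p = std::make_shared<int>(42);
  //   bool lock_free = std::atomic_is_lock_free(&p);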

  /**
   *  @brief  Atomic load for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @return @c *__p
   *
   *  The memory order shall not be @c memory_order_release or
   *  @c memory_order_acq_rel.
   *  @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }

  /// @}
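
  // Usage sketch (illustrative only, hypothetical names): a reader thread
  // takes a consistent copy of a shared_ptr that another thread may
  // replace concurrently.  Reading `global` without the atomic_* functions
  // would be a data race.
  //
  //   #include <memory>
  //   #include <atomic>
  //
  //   std::shared_ptr<int> global;  // accessed only via atomic_* functions
  //
  //   void reader()
  //   {
  //     std::shared_ptr<int> local = std::atomic_load(&global);
  //     if (local)
  //       consume(*local);  // safe: `local` keeps the object alive
  //   }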

  /**
   *  @brief  Atomic store for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r The value to store.
   *
   *  The memory order shall not be @c memory_order_acquire or
   *  @c memory_order_acq_rel.
   *  @{
   */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
			  __shared_ptr<_Tp, _Lp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  /// @}
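
  // The writer side of the sketch above (hypothetical names): atomically
  // replace the value of `global`, so concurrent atomic_load calls observe
  // either the old pointer or the new one, never a torn value.  The old
  // object is released once the last reader drops its copy.
  //
  //   void writer()
  //   { std::atomic_store(&global, std::make_shared<int>(42)); }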

  /**
   *  @brief  Atomic exchange for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __r New value to store in @c *__p.
   *  @return The original value of @c *__p
   *  @{
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
			     __shared_ptr<_Tp, _Lp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }

  /// @}
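
  // Usage sketch (illustrative only, hypothetical names): atomically take
  // the current value while leaving an empty pointer behind, e.g. draining
  // a single-slot mailbox shared between threads.
  //
  //   std::shared_ptr<Message>
  //   take_pending(std::shared_ptr<Message>* slot)
  //   {
  //     // Returns the previous *slot; *slot is left empty.
  //     return std::atomic_exchange(slot, std::shared_ptr<Message>{});
  //   }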

  /**
   *  @brief  Atomic compare-and-swap for shared_ptr objects.
   *  @param  __p A non-null pointer to a shared_ptr object.
   *  @param  __v A non-null pointer to a shared_ptr object.
   *  @param  __w The value to store in @c *__p when the comparison succeeds.
   *  @return True if @c *__p was equivalent to @c *__v, false otherwise.
   *
   *  The memory order for failure shall not be @c memory_order_release or
   *  @c memory_order_acq_rel, or stronger than the memory order for success.
   *  @{
   */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
					    shared_ptr<_Tp>* __v,
					    shared_ptr<_Tp> __w,
					    memory_order,
					    memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
					  shared_ptr<_Tp>* __v,
					  shared_ptr<_Tp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
					    __shared_ptr<_Tp, _Lp>* __v,
					    __shared_ptr<_Tp, _Lp> __w,
					    memory_order,
					    memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
				   __shared_ptr<_Tp, _Lp>* __v,
				   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
					  __shared_ptr<_Tp, _Lp>* __v,
					  __shared_ptr<_Tp, _Lp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
				 __shared_ptr<_Tp, _Lp>* __v,
				 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  /// @}
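
  // Usage sketch (illustrative only, hypothetical names): the classic
  // compare-and-swap retry loop, here prepending a node to a shared list.
  // Note that the comparison is both value- and ownership-based: *list
  // must share ownership with the expected value for the swap to happen.
  //
  //   struct List { int head; std::shared_ptr<List> tail; };
  //
  //   void push_front(std::shared_ptr<List>* list, int value)
  //   {
  //     auto node = std::make_shared<List>();
  //     node->head = value;
  //     node->tail = std::atomic_load(list);
  //     // On failure node->tail is updated to the current *list, so the
  //     // loop simply retries with the refreshed expected value.
  //     while (!std::atomic_compare_exchange_weak(list, &node->tail, node))
  //       { }
  //   }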

#if __cplusplus >= 202002L
# define __cpp_lib_atomic_shared_ptr 201711L
  template<typename _Tp>
    class atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;

  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend class atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
      struct _Atomic_count
      {
	// Either __shared_count<> or __weak_count<>
	using __count_type = decltype(_Tp::_M_refcount);

	// _Sp_counted_base<>*
	using pointer = decltype(__count_type::_M_pi);

	// Ensure we can use the LSB as the lock bit.
	static_assert(alignof(remove_pointer_t<pointer>) > 1);

	constexpr _Atomic_count() noexcept = default;

	explicit
	_Atomic_count(__count_type&& __c) noexcept
	: _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
	{
	  __c._M_pi = nullptr;
	}

	~_Atomic_count()
	{
	  auto __val = _M_val.load(memory_order_relaxed);
	  __glibcxx_assert(!(__val & _S_lock_bit));
	  if (auto __pi = reinterpret_cast<pointer>(__val))
	    {
	      if constexpr (__is_shared_ptr<_Tp>)
		__pi->_M_release();
	      else
		__pi->_M_weak_release();
	    }
	}

	_Atomic_count(const _Atomic_count&) = delete;
	_Atomic_count& operator=(const _Atomic_count&) = delete;

	// Precondition: Caller does not hold lock!
	// Returns the raw pointer value without the lock bit set.
	pointer
	lock(memory_order __o) const noexcept
	{
	  // To acquire the lock we flip the LSB from 0 to 1.
	  auto __current = _M_val.load(memory_order_relaxed);
	  while (__current & _S_lock_bit)
	    {
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = _M_val.load(memory_order_relaxed);
	    }

	  while (!_M_val.compare_exchange_strong(__current,
						 __current | _S_lock_bit,
						 __o,
						 memory_order_relaxed))
	    {
#if __cpp_lib_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = __current & ~_S_lock_bit;
	    }
	  return reinterpret_cast<pointer>(__current);
	}

	// Precondition: caller holds lock!
	void
	unlock(memory_order __o) const noexcept
	{
	  _M_val.fetch_sub(1, __o);
	}

	// Swaps the values of *this and __c, and unlocks *this.
	// Precondition: caller holds lock!
	void
	_M_swap_unlock(__count_type& __c, memory_order __o) noexcept
	{
	  if (__o != memory_order_seq_cst)
	    __o = memory_order_release;
	  auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
	  __x = _M_val.exchange(__x, __o);
	  __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
	}

#if __cpp_lib_atomic_wait
	// Precondition: caller holds lock!
	void
	_M_wait_unlock(memory_order __o) const noexcept
	{
	  auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
	  _M_val.wait(__v & ~_S_lock_bit, __o);
	}

	void
	notify_one() noexcept
	{
	  _M_val.notify_one();
	}

	void
	notify_all() noexcept
	{
	  _M_val.notify_all();
	}
#endif

      private:
	mutable __atomic_base<uintptr_t> _M_val{0};
	static constexpr uintptr_t _S_lock_bit{1};
      };
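
      // The technique above -- borrowing the least significant bit of a
      // sufficiently aligned pointer as a spinlock -- can be shown in
      // isolation.  A minimal standalone sketch (hypothetical names, not
      // part of this header):
      //
      //   #include <atomic>
      //   #include <cstdint>
      //
      //   struct Node { int data; };  // alignof(Node) > 1
      //
      //   class TaggedPtrLock
      //   {
      //     std::atomic<std::uintptr_t> val{0};
      //     static constexpr std::uintptr_t lock_bit{1};
      //
      //   public:
      //     // Spin until the LSB flips from 0 to 1; return the real pointer.
      //     Node* lock()
      //     {
      //       auto cur = val.load(std::memory_order_relaxed);
      //       while (cur & lock_bit)  // wait until apparently unlocked
      //         cur = val.load(std::memory_order_relaxed);
      //       while (!val.compare_exchange_strong(cur, cur | lock_bit,
      //                                           std::memory_order_acquire,
      //                                           std::memory_order_relaxed))
      //         cur &= ~lock_bit;     // retry with the lock bit cleared
      //       return reinterpret_cast<Node*>(cur);
      //     }
      //
      //     // Clearing the LSB releases the lock.
      //     void unlock() { val.fetch_sub(1, std::memory_order_release); }
      //   };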

      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;

      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
	if (__p)
	  {
	    if constexpr (__is_shared_ptr<_Tp>)
	      __p->_M_add_ref_copy();
	    else
	      __p->_M_weak_add_ref();
	  }
	return __p;
      }

      constexpr _Sp_atomic() noexcept = default;

      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;

      value_type
      load(memory_order __o) const noexcept
      {
	__glibcxx_assert(__o != memory_order_release
			   && __o != memory_order_acq_rel);
	// Ensure that the correct value of _M_ptr is visible after locking,
	// by upgrading relaxed or consume to acquire.
	if (__o != memory_order_seq_cst)
	  __o = memory_order_acquire;

	value_type __ret;
	auto __pi = _M_refcount.lock(__o);
	__ret._M_ptr = _M_ptr;
	__ret._M_refcount._M_pi = _S_add_ref(__pi);
	_M_refcount.unlock(memory_order_relaxed);
	return __ret;
      }

      void
      swap(value_type& __r, memory_order __o) noexcept
      {
	_M_refcount.lock(memory_order_acquire);
	std::swap(_M_ptr, __r._M_ptr);
	_M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	bool __result = true;
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __expected._M_ptr
	      && __pi == __expected._M_refcount._M_pi)
	  {
	    _M_ptr = __desired._M_ptr;
	    _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
	  }
	else
	  {
	    // __sink takes ownership of the old value of __expected, so
	    // that it is not destroyed until after the lock is released.
	    _Tp __sink = std::move(__expected);
	    __expected._M_ptr = _M_ptr;
	    __expected._M_refcount._M_pi = _S_add_ref(__pi);
	    _M_refcount.unlock(__o2);
	    __result = false;
	  }
	return __result;
      }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
	  _M_refcount._M_wait_unlock(__o);
	else
	  _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      {
	_M_refcount.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_refcount.notify_all();
      }
#endif
    };

  template<typename _Tp>
    class atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(shared_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
			      shared_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired,
					       __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o = memory_order_seq_cst) noexcept
      {
	// Derive the failure order from the success order: acq_rel
	// downgrades to acquire, and release downgrades to relaxed.
	memory_order __o2;
	switch (__o)
	  {
	  case memory_order_acq_rel:
	    __o2 = memory_order_acquire;
	    break;
	  case memory_order_release:
	    __o2 = memory_order_relaxed;
	    break;
	  default:
	    __o2 = __o;
	  }
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o = memory_order_seq_cst) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired), __o);
      }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      {
	_M_impl.wait(std::move(__old), __o);
      }

      void
      notify_one() noexcept
      {
	_M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_impl.notify_all();
      }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
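
  // Usage sketch for the specialization above (illustrative only,
  // hypothetical names): one thread publishes new state while readers take
  // stable snapshots, with no external mutex.
  //
  //   #include <memory>
  //   #include <atomic>
  //
  //   struct Config { int verbosity = 0; };
  //
  //   std::atomic<std::shared_ptr<Config>> current{
  //       std::make_shared<Config>()};
  //
  //   std::shared_ptr<Config> snapshot()  // any reader thread
  //   { return current.load(); }          // the snapshot stays valid even
  //                                       // if a writer replaces `current`
  //
  //   void publish(int v)                 // any writer thread
  //   {
  //     auto next = std::make_shared<Config>();
  //     next->verbosity = v;
  //     current.store(std::move(next));
  //   }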

  template<typename _Tp>
    class atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(weak_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }

      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
			      weak_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired,
					       __o, __o2);
      }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o = memory_order_seq_cst) noexcept
      {
	// Derive the failure order from the success order, as above.
	memory_order __o2;
	switch (__o)
	  {
	  case memory_order_acq_rel:
	    __o2 = memory_order_acquire;
	    break;
	  case memory_order_release:
	    __o2 = memory_order_relaxed;
	    break;
	  default:
	    __o2 = __o;
	  }
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o = memory_order_seq_cst) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired), __o);
      }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      {
	_M_impl.wait(std::move(__old), __o);
      }

      void
      notify_one() noexcept
      {
	_M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_impl.notify_all();
      }
#endif

    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
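
  // Usage sketch for atomic<weak_ptr> (illustrative only, hypothetical
  // names): a slot that observes the most recently used object without
  // keeping it alive.
  //
  //   std::atomic<std::weak_ptr<Widget>> last_used;
  //
  //   void remember(const std::shared_ptr<Widget>& w)
  //   { last_used.store(w); }
  //
  //   std::shared_ptr<Widget> recall()
  //   { return last_used.load().lock(); }  // empty if the Widget is gone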
#endif // C++20

  /// @} relates shared_ptr
  /// @} group pointer_abstractions

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _SHARED_PTR_ATOMIC_H