// -*- C++ -*- header.

// Copyright (C) 2008-2022 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

// ????????????????????????????????????????????????????????????????????
//
// This is a copy of the libstdc++ header, with the trivial modification
// of ignoring the c++config.h include. If and when the top-level build is
// fixed so that target libraries can be built using the newly built
// compiler, we can delete this file.
//
// ????????????????????????????????????????????????????????????????????

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#define __libitm_always_inline __attribute__((always_inline))

// #pragma GCC system_header

// #ifndef __GXX_EXPERIMENTAL_CXX0X__
// # include <bits/c++0x_warning.h>
// #endif

// #include <bits/atomic_base.h>

namespace std // _GLIBCXX_VISIBILITY(default)
{
  // _GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  inline __libitm_always_inline memory_order
  __calculate_memory_order(memory_order __m) noexcept
  {
    const bool __cond1 = __m == memory_order_release;
    const bool __cond2 = __m == memory_order_acq_rel;
    memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
    memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
    return __mo2;
  }
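  // __calculate_memory_order derives the failure order for the
  // single-order compare_exchange_* overloads below: a failed exchange
  // performs no store, so release weakens to relaxed and acq_rel to
  // acquire.  The resulting mapping, spelled out (illustrative only):
  //
  //   __calculate_memory_order(memory_order_relaxed) == memory_order_relaxed
  //   __calculate_memory_order(memory_order_consume) == memory_order_consume
  //   __calculate_memory_order(memory_order_acquire) == memory_order_acquire
  //   __calculate_memory_order(memory_order_release) == memory_order_relaxed
  //   __calculate_memory_order(memory_order_acq_rel) == memory_order_acquire
  //   __calculate_memory_order(memory_order_seq_cst) == memory_order_seq_cst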
  inline __libitm_always_inline void
  atomic_thread_fence(memory_order __m) noexcept
  {
    __atomic_thread_fence (__m);
  }

  inline __libitm_always_inline void
  atomic_signal_fence(memory_order __m) noexcept
  {
    __atomic_signal_fence (__m);
  }
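  // Usage sketch (illustrative only; __flag and __payload are assumed
  // example variables, an atomic<bool> and a plain int): a release fence
  // before a relaxed store publishes prior writes to any thread that
  // reads the flag and then issues an acquire fence.
  //
  //   // writer:
  //   __payload = 42;
  //   atomic_thread_fence(memory_order_release);
  //   __flag.store(true, memory_order_relaxed);
  //
  //   // reader:
  //   while (!__flag.load(memory_order_relaxed)) { }
  //   atomic_thread_fence(memory_order_acquire);
  //   // __payload is now guaranteed to read 42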
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  /// Lock-free Property
#define ATOMIC_BOOL_LOCK_FREE     __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE     __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE  __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE    __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE      __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE     __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE    __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE  __GCC_ATOMIC_POINTER_LOCK_FREE

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char> atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char> atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char> atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short> atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short> atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int> atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int> atomic_uint;

  /// atomic_long
  typedef __atomic_base<long> atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long> atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t> atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t> atomic_char32_t;
  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t> atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t> atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  /**
   * @brief Base type for atomic_flag.
   *
   * Base type is POD with data, allowing atomic_flag to derive from
   * it and meet the standard layout type requirement. In addition to
   * compatibility with a C interface, this allows different
   * implementations of atomic_flag to use the same atomic operation
   * functions, via a standard conversion to the __atomic_flag_base
   * argument.
   */
  // _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    bool _M_i;
  };

  // _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { false }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

    __libitm_always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __libitm_always_inline bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    __libitm_always_inline void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);
      __atomic_clear (&_M_i, __m);
    }

    __libitm_always_inline void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);
      __atomic_clear (&_M_i, __m);
    }
  };
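  // Usage sketch (illustrative only): atomic_flag is the canonical
  // building block for a spin lock; test_and_set returns the previous
  // value, so it loops until the prior owner clears the flag.
  //
  //   atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   // acquire:
  //   while (__lock.test_and_set(memory_order_acquire)) { }
  //   // ... critical section ...
  //   __lock.clear(memory_order_release);  // release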
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      __libitm_always_inline void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);
        __atomic_store_n(&_M_i, __i, __m);
      }

      __libitm_always_inline void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);
        __atomic_store_n(&_M_i, __i, __m);
      }

      __libitm_always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        return __atomic_load_n(&_M_i, __m);
      }

      __libitm_always_inline __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        return __atomic_load_n(&_M_i, __m);
      }

      __libitm_always_inline __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __libitm_always_inline __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __libitm_always_inline __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __libitm_always_inline __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
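  // Usage sketch (illustrative only, assuming a counter of type
  // __atomic_base<int> named __c): compare_exchange_weak may fail
  // spuriously, so it is normally driven from a retry loop.  On failure
  // the expected value is refreshed with the current contents, so the
  // loop simply retries with the new value.
  //
  //   int __old = __c.load(memory_order_relaxed);
  //   while (!__c.compare_exchange_weak(__old, __old * 2,
  //                                     memory_order_acq_rel,
  //                                     memory_order_relaxed))
  //     { }  // __old now holds the freshly observed value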
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      __libitm_always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);
        __atomic_store_n(&_M_p, __p, __m);
      }

      __libitm_always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);
        __atomic_store_n(&_M_p, __p, __m);
      }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        return __atomic_load_n(&_M_p, __m);
      }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        return __atomic_load_n(&_M_p, __m);
      }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);
        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }
    };
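  // Usage sketch (illustrative only; __ap and __d are assumed example
  // names): the pointer specialization keys its arithmetic on ptrdiff_t,
  // with fetch_add returning the prior value and operator+= the new one.
  //
  //   __atomic_base<int*> __ap(__some_int_ptr);
  //   int* __prev = __ap.fetch_add(__d);   // old value, via __atomic_fetch_add
  //   int* __next = (__ap += __d);         // new value, via __atomic_add_fetch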
  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    __libitm_always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    __libitm_always_inline void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    __libitm_always_inline bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    __libitm_always_inline bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    __libitm_always_inline bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    __libitm_always_inline bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    __libitm_always_inline bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }
      __libitm_always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }
      __libitm_always_inline void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      __libitm_always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      __libitm_always_inline _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      __libitm_always_inline _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      __libitm_always_inline _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      __libitm_always_inline bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };
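  // Usage sketch (illustrative only, assuming a small trivially copyable
  // struct __pair, a hypothetical example type): the primary template
  // works for any such _Tp by copying whole objects through the generic
  // __atomic_store/__atomic_load builtins rather than the _n variants.
  //
  //   struct __pair { int __a; int __b; };
  //   atomic<__pair> __p(__pair{1, 2});
  //   __pair __v = __p.load();     // copies the whole object out
  //   __p.store(__pair{3, 4});     // copies the whole object in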
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      __libitm_always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      __libitm_always_inline void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __libitm_always_inline __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __libitm_always_inline __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NB: the weak overloads forward to the base's strong exchange,
      // since the pointer __atomic_base only defines
      // compare_exchange_strong.
      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      __libitm_always_inline bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __libitm_always_inline __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int __integral_type;
      typedef atomic_int __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
  // Function definitions, atomic_flag operations.
  inline __libitm_always_inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline __libitm_always_inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline __libitm_always_inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline __libitm_always_inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
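  // Illustrative sketch, not part of this header: a minimal spin lock
  // built from the free functions above.  __lock, __acquire and
  // __release are hypothetical names; ATOMIC_FLAG_INIT is assumed to be
  // available as in the standard <atomic> header.
  //
  //   static atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __acquire() noexcept
  //   {
  //     // Spin until test_and_set reports the flag was previously clear.
  //     while (atomic_flag_test_and_set_explicit(&__lock,
  //                                              memory_order_acquire))
  //       ;
  //   }
  //
  //   void __release() noexcept
  //   { atomic_flag_clear_explicit(&__lock, memory_order_release); }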
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    __libitm_always_inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    __libitm_always_inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    __libitm_always_inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    __libitm_always_inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    __libitm_always_inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    __libitm_always_inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
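  // Illustrative sketch, not part of this header: the canonical retry
  // loop around the weak compare-exchange.  The weak form may fail
  // spuriously, so it is always paired with a loop; on failure *__i1
  // (here __expected) is refreshed with the value actually observed,
  // and the desired value is recomputed from it.
  //
  //   atomic<int> __v(1);
  //   int __expected = atomic_load(&__v);
  //   while (!atomic_compare_exchange_weak(&__v, &__expected,
  //                                        __expected * 2))
  //     { }  // __expected was reloaded; retry with the fresh value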
  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    __libitm_always_inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
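  // Illustrative sketch, not part of this header: a statistics counter
  // where only atomicity matters, so the _explicit form is used to relax
  // the default seq_cst ordering.  __hits and __total are hypothetical
  // names; atomic_uint is the __atomic_base<unsigned int> typedef these
  // overloads expect.
  //
  //   atomic_uint __hits(0);
  //   atomic_fetch_add_explicit(&__hits, 1u, memory_order_relaxed);
  //   unsigned __total = __hits.load(memory_order_relaxed);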
  // Partial specializations for pointers.
  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    __libitm_always_inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
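  // Illustrative sketch, not part of this header: the pointer overloads
  // take a ptrdiff_t scaled by the element type, so fetch_add(1)
  // advances by one whole element, exactly as __p + 1 would.  __buf,
  // __p and __old are hypothetical names.
  //
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   atomic<int*> __p(__buf);
  //   int* __old = atomic_fetch_add(&__p, 1);  // __old == __buf
  //   // __p now holds __buf + 1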
  // @} group atomics

// _GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif