  1. // -*- C++ -*-
  2. //===--------------------------- atomic -----------------------------------===//
  3. //
  4. // The LLVM Compiler Infrastructure
  5. //
  6. // This file is distributed under the University of Illinois Open Source
  7. // License. See LICENSE.TXT for details.
  8. //
  9. //===----------------------------------------------------------------------===//
  10. #ifndef _LIBCPP_ATOMIC
  11. #define _LIBCPP_ATOMIC
  12. /*
  13. atomic synopsis
  14. namespace std
  15. {
  16. // feature test macro
  17. #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
  18. // order and consistency
  19. typedef enum memory_order
  20. {
  21. memory_order_relaxed,
  22. memory_order_consume, // load-consume
  23. memory_order_acquire, // load-acquire
  24. memory_order_release, // store-release
  25. memory_order_acq_rel, // store-release load-acquire
  26. memory_order_seq_cst // store-release load-acquire
  27. } memory_order;
  28. template <class T> T kill_dependency(T y) noexcept;
  29. // lock-free property
  30. #define ATOMIC_BOOL_LOCK_FREE unspecified
  31. #define ATOMIC_CHAR_LOCK_FREE unspecified
  32. #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
  33. #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
  34. #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
  35. #define ATOMIC_SHORT_LOCK_FREE unspecified
  36. #define ATOMIC_INT_LOCK_FREE unspecified
  37. #define ATOMIC_LONG_LOCK_FREE unspecified
  38. #define ATOMIC_LLONG_LOCK_FREE unspecified
  39. #define ATOMIC_POINTER_LOCK_FREE unspecified
  40. // flag type and operations
  41. typedef struct atomic_flag
  42. {
  43. bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
  44. bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
  45. void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
  46. void clear(memory_order m = memory_order_seq_cst) noexcept;
  47. atomic_flag() noexcept = default;
  48. atomic_flag(const atomic_flag&) = delete;
  49. atomic_flag& operator=(const atomic_flag&) = delete;
  50. atomic_flag& operator=(const atomic_flag&) volatile = delete;
  51. } atomic_flag;
  52. bool
  53. atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
  54. bool
  55. atomic_flag_test_and_set(atomic_flag* obj) noexcept;
  56. bool
  57. atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
  58. memory_order m) noexcept;
  59. bool
  60. atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
  61. void
  62. atomic_flag_clear(volatile atomic_flag* obj) noexcept;
  63. void
  64. atomic_flag_clear(atomic_flag* obj) noexcept;
  65. void
  66. atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
  67. void
  68. atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
  69. #define ATOMIC_FLAG_INIT see below
  70. #define ATOMIC_VAR_INIT(value) see below
  71. template <class T>
  72. struct atomic
  73. {
  74. static constexpr bool is_always_lock_free;
  75. bool is_lock_free() const volatile noexcept;
  76. bool is_lock_free() const noexcept;
  77. void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  78. void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
  79. T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  80. T load(memory_order m = memory_order_seq_cst) const noexcept;
  81. operator T() const volatile noexcept;
  82. operator T() const noexcept;
  83. T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  84. T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
  85. bool compare_exchange_weak(T& expc, T desr,
  86. memory_order s, memory_order f) volatile noexcept;
  87. bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
  88. bool compare_exchange_strong(T& expc, T desr,
  89. memory_order s, memory_order f) volatile noexcept;
  90. bool compare_exchange_strong(T& expc, T desr,
  91. memory_order s, memory_order f) noexcept;
  92. bool compare_exchange_weak(T& expc, T desr,
  93. memory_order m = memory_order_seq_cst) volatile noexcept;
  94. bool compare_exchange_weak(T& expc, T desr,
  95. memory_order m = memory_order_seq_cst) noexcept;
  96. bool compare_exchange_strong(T& expc, T desr,
  97. memory_order m = memory_order_seq_cst) volatile noexcept;
  98. bool compare_exchange_strong(T& expc, T desr,
  99. memory_order m = memory_order_seq_cst) noexcept;
  100. atomic() noexcept = default;
  101. constexpr atomic(T desr) noexcept;
  102. atomic(const atomic&) = delete;
  103. atomic& operator=(const atomic&) = delete;
  104. atomic& operator=(const atomic&) volatile = delete;
  105. T operator=(T) volatile noexcept;
  106. T operator=(T) noexcept;
  107. };
  108. template <>
  109. struct atomic<integral>
  110. {
  111. static constexpr bool is_always_lock_free;
  112. bool is_lock_free() const volatile noexcept;
  113. bool is_lock_free() const noexcept;
  114. void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  115. void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
  116. integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  117. integral load(memory_order m = memory_order_seq_cst) const noexcept;
  118. operator integral() const volatile noexcept;
  119. operator integral() const noexcept;
  120. integral exchange(integral desr,
  121. memory_order m = memory_order_seq_cst) volatile noexcept;
  122. integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
  123. bool compare_exchange_weak(integral& expc, integral desr,
  124. memory_order s, memory_order f) volatile noexcept;
  125. bool compare_exchange_weak(integral& expc, integral desr,
  126. memory_order s, memory_order f) noexcept;
  127. bool compare_exchange_strong(integral& expc, integral desr,
  128. memory_order s, memory_order f) volatile noexcept;
  129. bool compare_exchange_strong(integral& expc, integral desr,
  130. memory_order s, memory_order f) noexcept;
  131. bool compare_exchange_weak(integral& expc, integral desr,
  132. memory_order m = memory_order_seq_cst) volatile noexcept;
  133. bool compare_exchange_weak(integral& expc, integral desr,
  134. memory_order m = memory_order_seq_cst) noexcept;
  135. bool compare_exchange_strong(integral& expc, integral desr,
  136. memory_order m = memory_order_seq_cst) volatile noexcept;
  137. bool compare_exchange_strong(integral& expc, integral desr,
  138. memory_order m = memory_order_seq_cst) noexcept;
  139. integral
  140. fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  141. integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
  142. integral
  143. fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  144. integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
  145. integral
  146. fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  147. integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
  148. integral
  149. fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  150. integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
  151. integral
  152. fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  153. integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
  154. atomic() noexcept = default;
  155. constexpr atomic(integral desr) noexcept;
  156. atomic(const atomic&) = delete;
  157. atomic& operator=(const atomic&) = delete;
  158. atomic& operator=(const atomic&) volatile = delete;
  159. integral operator=(integral desr) volatile noexcept;
  160. integral operator=(integral desr) noexcept;
  161. integral operator++(int) volatile noexcept;
  162. integral operator++(int) noexcept;
  163. integral operator--(int) volatile noexcept;
  164. integral operator--(int) noexcept;
  165. integral operator++() volatile noexcept;
  166. integral operator++() noexcept;
  167. integral operator--() volatile noexcept;
  168. integral operator--() noexcept;
  169. integral operator+=(integral op) volatile noexcept;
  170. integral operator+=(integral op) noexcept;
  171. integral operator-=(integral op) volatile noexcept;
  172. integral operator-=(integral op) noexcept;
  173. integral operator&=(integral op) volatile noexcept;
  174. integral operator&=(integral op) noexcept;
  175. integral operator|=(integral op) volatile noexcept;
  176. integral operator|=(integral op) noexcept;
  177. integral operator^=(integral op) volatile noexcept;
  178. integral operator^=(integral op) noexcept;
  179. };
  180. template <class T>
  181. struct atomic<T*>
  182. {
  183. static constexpr bool is_always_lock_free;
  184. bool is_lock_free() const volatile noexcept;
  185. bool is_lock_free() const noexcept;
  186. void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  187. void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
  188. T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  189. T* load(memory_order m = memory_order_seq_cst) const noexcept;
  190. operator T*() const volatile noexcept;
  191. operator T*() const noexcept;
  192. T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  193. T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
  194. bool compare_exchange_weak(T*& expc, T* desr,
  195. memory_order s, memory_order f) volatile noexcept;
  196. bool compare_exchange_weak(T*& expc, T* desr,
  197. memory_order s, memory_order f) noexcept;
  198. bool compare_exchange_strong(T*& expc, T* desr,
  199. memory_order s, memory_order f) volatile noexcept;
  200. bool compare_exchange_strong(T*& expc, T* desr,
  201. memory_order s, memory_order f) noexcept;
  202. bool compare_exchange_weak(T*& expc, T* desr,
  203. memory_order m = memory_order_seq_cst) volatile noexcept;
  204. bool compare_exchange_weak(T*& expc, T* desr,
  205. memory_order m = memory_order_seq_cst) noexcept;
  206. bool compare_exchange_strong(T*& expc, T* desr,
  207. memory_order m = memory_order_seq_cst) volatile noexcept;
  208. bool compare_exchange_strong(T*& expc, T* desr,
  209. memory_order m = memory_order_seq_cst) noexcept;
  210. T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
  211. T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
  212. T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
  213. T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
  214. atomic() noexcept = default;
  215. constexpr atomic(T* desr) noexcept;
  216. atomic(const atomic&) = delete;
  217. atomic& operator=(const atomic&) = delete;
  218. atomic& operator=(const atomic&) volatile = delete;
  219. T* operator=(T*) volatile noexcept;
  220. T* operator=(T*) noexcept;
  221. T* operator++(int) volatile noexcept;
  222. T* operator++(int) noexcept;
  223. T* operator--(int) volatile noexcept;
  224. T* operator--(int) noexcept;
  225. T* operator++() volatile noexcept;
  226. T* operator++() noexcept;
  227. T* operator--() volatile noexcept;
  228. T* operator--() noexcept;
  229. T* operator+=(ptrdiff_t op) volatile noexcept;
  230. T* operator+=(ptrdiff_t op) noexcept;
  231. T* operator-=(ptrdiff_t op) volatile noexcept;
  232. T* operator-=(ptrdiff_t op) noexcept;
  233. };
  234. template <class T>
  235. bool
  236. atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
  237. template <class T>
  238. bool
  239. atomic_is_lock_free(const atomic<T>* obj) noexcept;
  240. template <class T>
  241. void
  242. atomic_init(volatile atomic<T>* obj, T desr) noexcept;
  243. template <class T>
  244. void
  245. atomic_init(atomic<T>* obj, T desr) noexcept;
  246. template <class T>
  247. void
  248. atomic_store(volatile atomic<T>* obj, T desr) noexcept;
  249. template <class T>
  250. void
  251. atomic_store(atomic<T>* obj, T desr) noexcept;
  252. template <class T>
  253. void
  254. atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
  255. template <class T>
  256. void
  257. atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
  258. template <class T>
  259. T
  260. atomic_load(const volatile atomic<T>* obj) noexcept;
  261. template <class T>
  262. T
  263. atomic_load(const atomic<T>* obj) noexcept;
  264. template <class T>
  265. T
  266. atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
  267. template <class T>
  268. T
  269. atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
  270. template <class T>
  271. T
  272. atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
  273. template <class T>
  274. T
  275. atomic_exchange(atomic<T>* obj, T desr) noexcept;
  276. template <class T>
  277. T
  278. atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
  279. template <class T>
  280. T
  281. atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
  282. template <class T>
  283. bool
  284. atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
  285. template <class T>
  286. bool
  287. atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
  288. template <class T>
  289. bool
  290. atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
  291. template <class T>
  292. bool
  293. atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
  294. template <class T>
  295. bool
  296. atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
  297. T desr,
  298. memory_order s, memory_order f) noexcept;
  299. template <class T>
  300. bool
  301. atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
  302. memory_order s, memory_order f) noexcept;
  303. template <class T>
  304. bool
  305. atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
  306. T* expc, T desr,
  307. memory_order s, memory_order f) noexcept;
  308. template <class T>
  309. bool
  310. atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
  311. T desr,
  312. memory_order s, memory_order f) noexcept;
  313. template <class Integral>
  314. Integral
  315. atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
  316. template <class Integral>
  317. Integral
  318. atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
  319. template <class Integral>
  320. Integral
  321. atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
  322. memory_order m) noexcept;
  323. template <class Integral>
  324. Integral
  325. atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
  326. memory_order m) noexcept;
  327. template <class Integral>
  328. Integral
  329. atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
  330. template <class Integral>
  331. Integral
  332. atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
  333. template <class Integral>
  334. Integral
  335. atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
  336. memory_order m) noexcept;
  337. template <class Integral>
  338. Integral
  339. atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
  340. memory_order m) noexcept;
  341. template <class Integral>
  342. Integral
  343. atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
  344. template <class Integral>
  345. Integral
  346. atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
  347. template <class Integral>
  348. Integral
  349. atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
  350. memory_order m) noexcept;
  351. template <class Integral>
  352. Integral
  353. atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
  354. memory_order m) noexcept;
  355. template <class Integral>
  356. Integral
  357. atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
  358. template <class Integral>
  359. Integral
  360. atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
  361. template <class Integral>
  362. Integral
  363. atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
  364. memory_order m) noexcept;
  365. template <class Integral>
  366. Integral
  367. atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
  368. memory_order m) noexcept;
  369. template <class Integral>
  370. Integral
  371. atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
  372. template <class Integral>
  373. Integral
  374. atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
  375. template <class Integral>
  376. Integral
  377. atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
  378. memory_order m) noexcept;
  379. template <class Integral>
  380. Integral
  381. atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
  382. memory_order m) noexcept;
  383. template <class T>
  384. T*
  385. atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
  386. template <class T>
  387. T*
  388. atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
  389. template <class T>
  390. T*
  391. atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
  392. memory_order m) noexcept;
  393. template <class T>
  394. T*
  395. atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
  396. template <class T>
  397. T*
  398. atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
  399. template <class T>
  400. T*
  401. atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
  402. template <class T>
  403. T*
  404. atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
  405. memory_order m) noexcept;
  406. template <class T>
  407. T*
  408. atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
  409. // Atomics for standard typedef types
  410. typedef atomic<bool> atomic_bool;
  411. typedef atomic<char> atomic_char;
  412. typedef atomic<signed char> atomic_schar;
  413. typedef atomic<unsigned char> atomic_uchar;
  414. typedef atomic<short> atomic_short;
  415. typedef atomic<unsigned short> atomic_ushort;
  416. typedef atomic<int> atomic_int;
  417. typedef atomic<unsigned int> atomic_uint;
  418. typedef atomic<long> atomic_long;
  419. typedef atomic<unsigned long> atomic_ulong;
  420. typedef atomic<long long> atomic_llong;
  421. typedef atomic<unsigned long long> atomic_ullong;
  422. typedef atomic<char16_t> atomic_char16_t;
  423. typedef atomic<char32_t> atomic_char32_t;
  424. typedef atomic<wchar_t> atomic_wchar_t;
  425. typedef atomic<int_least8_t> atomic_int_least8_t;
  426. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  427. typedef atomic<int_least16_t> atomic_int_least16_t;
  428. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  429. typedef atomic<int_least32_t> atomic_int_least32_t;
  430. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  431. typedef atomic<int_least64_t> atomic_int_least64_t;
  432. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  433. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  434. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  435. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  436. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  437. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  438. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  439. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  440. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  441. typedef atomic<int8_t> atomic_int8_t;
  442. typedef atomic<uint8_t> atomic_uint8_t;
  443. typedef atomic<int16_t> atomic_int16_t;
  444. typedef atomic<uint16_t> atomic_uint16_t;
  445. typedef atomic<int32_t> atomic_int32_t;
  446. typedef atomic<uint32_t> atomic_uint32_t;
  447. typedef atomic<int64_t> atomic_int64_t;
  448. typedef atomic<uint64_t> atomic_uint64_t;
  449. typedef atomic<intptr_t> atomic_intptr_t;
  450. typedef atomic<uintptr_t> atomic_uintptr_t;
  451. typedef atomic<size_t> atomic_size_t;
  452. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  453. typedef atomic<intmax_t> atomic_intmax_t;
  454. typedef atomic<uintmax_t> atomic_uintmax_t;
  455. // fences
  456. void atomic_thread_fence(memory_order m) noexcept;
  457. void atomic_signal_fence(memory_order m) noexcept;
  458. } // std
  459. */
  460. #include <__config>
  461. #include <cstddef>
  462. #include <cstdint>
  463. #include <type_traits>
  464. #if defined(_LIBCPP_SGX_CONFIG)
  465. #include <sgx_spinlock.h>
  466. #include <cstring>
  467. #include <cstdlib>
  468. #endif // defined(_LIBCPP_SGX_CONFIG)
  469. #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
  470. #pragma GCC system_header
  471. #endif
  472. #if defined(_LIBCPP_HAS_NO_THREADS) && defined(_LIBCPP_SGX_HAS_NO_ATOMIC)
  473. #error <atomic> is not supported on this single threaded system
  474. #endif
  475. #if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
  476. #error <atomic> is not implemented
  477. #endif
  478. #if _LIBCPP_STD_VER > 14
  479. // FIXME: use the right feature test macro value as chose by SG10.
  480. # define __cpp_lib_atomic_is_always_lock_free 201603L
  481. #endif
  482. _LIBCPP_BEGIN_NAMESPACE_STD
  483. typedef enum memory_order
  484. {
  485. memory_order_relaxed, memory_order_consume, memory_order_acquire,
  486. memory_order_release, memory_order_acq_rel, memory_order_seq_cst
  487. } memory_order;
  488. #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
  489. namespace __gcc_atomic {
// Storage wrapper that emulates the C11 _Atomic(_Tp) type on top of the
// GCC __atomic_* builtins; the _Atomic(x) macro below expands to it.
// All atomic operations in this backend act on the address of __a_value.
template <typename _Tp>
struct __gcc_atomic_t {
#if _GNUC_VER >= 501
  // GCC implements is_trivially_copyable from 5.1 on; enforce the
  // [atomics.types.generic]p1 requirement where the trait is available.
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif
  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  __gcc_atomic_t() _NOEXCEPT = default;
#else
  // Fallback for compilers without '= default': value-initialize.
  __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  // The wrapped value; the __atomic builtins operate on &__a_value.
  _Tp __a_value;
};
  506. #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
// SFINAE machinery: detect whether `(*p).__a_value = d` is well-formed
// when p has (pointer) type _Tp and d has type _Td.  Used below to pick
// the correct __c11_atomic_init overload for volatile atomics.

// Declared but never defined: only used inside unevaluated operands to
// conjure a value of type _Tp.
template <typename _Tp> _Tp __create();

// Chosen when the assignment through the _Tp pointer compiles; the
// sizeof() is an unevaluated context, so nothing actually runs.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
// Fallback overload selected when the assignment above is ill-formed.
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// value is true iff a _Td can be assigned through a _Tp pointer.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};
  518. static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  519. // Avoid switch statement to make this a constexpr.
  520. return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
  521. (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
  522. (__order == memory_order_release ? __ATOMIC_RELEASE:
  523. (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
  524. (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
  525. __ATOMIC_CONSUME))));
  526. }
  527. static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  528. // Avoid switch statement to make this a constexpr.
  529. return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
  530. (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
  531. (__order == memory_order_release ? __ATOMIC_RELAXED:
  532. (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
  533. (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
  534. __ATOMIC_CONSUME))));
  535. }
  536. } // namespace __gcc_atomic
// Non-atomic initialization of a volatile atomic, enabled when _Tp can
// be assigned directly through the volatile pointer.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
  __a->__a_value = __val;
}
  544. template <typename _Tp>
  545. static inline
  546. typename enable_if<
  547. !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
  548. __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
  549. __c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
  550. // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  551. // the default operator= in an object is not volatile, a byte-by-byte copy
  552. // is required.
  553. volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  554. volatile char* end = to + sizeof(_Tp);
  555. char* from = reinterpret_cast<char*>(&__val);
  556. while (to != end) {
  557. *to++ = *from++;
  558. }
  559. }
// Non-atomic initialization of a non-volatile atomic: plain assignment.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
  __a->__a_value = __val;
}
// Inter-thread fence with the requested ordering (GCC builtin).
static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}
// Fence between a thread and a signal handler executing in the same
// thread; compiler barrier only (GCC builtin).
static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}
  570. template <typename _Tp>
  571. static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
  572. memory_order __order) {
  573. return __atomic_store(&__a->__a_value, &__val,
  574. __gcc_atomic::__to_gcc_order(__order));
  575. }
// Atomic store, non-volatile overload.
template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}
// Atomic load: the generic __atomic_load builtin writes the value into a
// local temporary, which is returned by value.
template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
// Atomic exchange: stores __value and returns the previous value, which the
// generic builtin deposits into a local temporary.
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
// Strong compare-exchange (weak flag = false: no spurious failures).
// On failure, the builtin updates *__expected with the observed value.
// The failure ordering is clamped by __to_gcc_failure_order.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
// Weak compare-exchange (weak flag = true: may fail spuriously; intended
// for retry loops).  Otherwise identical to the strong form above.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
// __skip_amt<_Tp>::value scales the delta passed to fetch_add/fetch_sub:
// 1 for ordinary types, sizeof(_Tp) for _Tp* so pointer atomics advance by
// whole objects (the builtin works in bytes).
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (No `value` member, so use of these specializations fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
// fetch_add / fetch_sub: return the previous value.  The delta is scaled
// by __skip_amt<_Tp>::value so pointer atomics step by sizeof(*ptr).
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
// Bitwise fetch operations (and/or/xor): return the previous value.
// No __skip_amt scaling here — these are only meaningful for integral types.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
  719. #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
// [atomics.general] std::kill_dependency: returns its argument unchanged,
// terminating a memory_order_consume dependency chain on __y.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
// ATOMIC_*_LOCK_FREE feature-test macros.  Under SGX every category is
// reported as 0 (never lock-free); otherwise forward to the
// compiler-predefined __GCC_ATOMIC_*_LOCK_FREE values.
#if defined(_LIBCPP_SGX_CONFIG)
#define ATOMIC_XXX_LOCK_FREE 0
#define ATOMIC_BOOL_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE ATOMIC_XXX_LOCK_FREE
#else // defined(_LIBCPP_SGX_CONFIG)
#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
#endif // defined(_LIBCPP_SGX_CONFIG)
// Base of std::atomic<T>, generic (non-integral) case.  The bool template
// parameter selects the integral specialization below (which adds the
// arithmetic/bitwise fetch operations); bool itself is excluded from it.
// All operations forward to the __c11_atomic_* layer above.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // The stored value; mutable so const-qualified loads can pass its
    // address to builtins taking a non-const pointer.
    mutable _Atomic(_Tp) __a_;
#if defined(__cpp_lib_atomic_is_always_lock_free)
    static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if defined(_LIBCPP_SGX_CONFIG)
    // SGX configuration reports no lock-free atomics.
    return false;
#else
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
#endif
    }
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        // Delegate to the volatile overload so the logic lives in one place.
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion performs a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // Two-ordering compare-exchange overloads (separate success/failure).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-ordering overloads: the same order is used for success and
    // failure (the __c11 layer clamps the failure side as required).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition of the static data member so that ODR-uses of
// is_always_lock_free link (pre-C++17 inline-variable semantics).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
// atomic<Integral>
// Specialization for integral types (excluding bool): layers the
// arithmetic/bitwise fetch operations and the operator sugar on top of the
// generic base.  All fetch_* return the value *before* the operation; the
// compound-assignment operators recompute and return the value *after* it.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    // Post-increment/decrement: old value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
    // Pre-increment/decrement and compound assignment: new value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
// atomic<T>
// Primary template: inherits everything from __atomic_base and adds
// assignment from _Tp (a seq_cst store returning the stored value).
// Copying is disabled in the base.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
// atomic<T*>
// Partial specialization for pointers: adds pointer arithmetic
// (fetch_add/fetch_sub take ptrdiff_t; the __c11 layer scales by
// sizeof(_Tp) via __skip_amt).  fetch_* return the old pointer; the
// operators return the updated pointer.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                 volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                 volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
};
// atomic_is_lock_free
// C-compatible free-function interface: forwards to the member function.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
// atomic_init
// Non-atomic initialization of an atomic object; forwards to the
// implementation-level __c11_atomic_init on the stored __a_.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
// atomic_store / atomic_store_explicit
// Free-function forms; the non-explicit form uses the member's seq_cst
// default, the _explicit form passes the caller's ordering through.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}
// atomic_store_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}
// atomic_load / atomic_load_explicit
// Free-function forms; forward to the const member load().
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}
// atomic_load_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
// atomic_exchange / atomic_exchange_explicit
// Free-function forms; return the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}
// atomic_exchange_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
// atomic_compare_exchange_weak / atomic_compare_exchange_strong
// Free-function forms taking the expected value by pointer (C interface);
// forward to the member overloads with their seq_cst defaults.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}
// atomic_compare_exchange_strong
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
// atomic_compare_exchange_weak_explicit /
// atomic_compare_exchange_strong_explicit
// Free-function forms with separate success (__s) and failure (__f)
// orderings, forwarded to the four-argument member overloads.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
// atomic_compare_exchange_strong_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
// atomic_fetch_add / atomic_fetch_add_explicit
// Integral overloads are constrained via enable_if to integral, non-bool
// _Tp; pointer overloads take a ptrdiff_t delta.  All return the previous
// value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
// atomic_fetch_add_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
// atomic_fetch_sub / atomic_fetch_sub_explicit
// Mirrors atomic_fetch_add: integral (non-bool) and pointer overloads,
// returning the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
// atomic_fetch_sub_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
// atomic_fetch_and / atomic_fetch_and_explicit
// Bitwise AND; integral non-bool types only (no pointer overloads).
// Returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}
// atomic_fetch_and_explicit
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
// atomic_fetch_or

// Atomically replaces the value of *__o with (old | __op) using
// memory_order_seq_cst and returns the previous value.  Constrained to
// integral types other than bool.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// Non-volatile overload; identical semantics.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}
// atomic_fetch_or_explicit

// Same as atomic_fetch_or, but with a caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// Non-volatile overload; identical semantics.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
// atomic_fetch_xor

// Atomically replaces the value of *__o with (old ^ __op) using
// memory_order_seq_cst and returns the previous value.  Constrained to
// integral types other than bool.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// Non-volatile overload; identical semantics.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}
// atomic_fetch_xor_explicit

// Same as atomic_fetch_xor, but with a caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

// Non-volatile overload; identical semantics.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
// flag type and operations

// atomic_flag: a boolean atomic primitive, implemented directly on top of the
// Clang __c11_atomic_* builtins (via _Atomic(bool)) rather than on
// atomic<bool>.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    // Atomically sets the flag to true and returns the value it held
    // immediately before, using memory order __m.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}

    // Atomically sets the flag to false, using memory order __m.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // Default construction: defaulted when the compiler supports it (flag
    // state is then unspecified until ATOMIC_FLAG_INIT or clear() is used);
    // otherwise fall back to value-initializing the underlying bool.
    // Note: the visibility macro precedes the #ifndef so it applies to
    // whichever branch is compiled.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // libc++ extension: construct with an explicit initial value.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // atomic_flag is neither copyable nor copy-assignable; when deleted
    // functions are unavailable, declare-but-don't-define as private instead.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
// Free-function interface to atomic_flag::test_and_set (C-compatible API).
// Uses the default memory order, memory_order_seq_cst.
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

// As above, but with a caller-supplied memory order __m.
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}
// Free-function interface to atomic_flag::clear (C-compatible API).
// Uses the default memory order, memory_order_seq_cst.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

// As above, but with a caller-supplied memory order __m.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
// fences

// Establishes memory-synchronization ordering as instructed by __m, without
// an associated atomic object; delegates to the __c11_atomic_thread_fence
// builtin.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

// Fence ordering observable only between a thread and a signal handler
// executed in that same thread; delegates to the __c11_atomic_signal_fence
// builtin.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
// Atomics for standard typedef types

// Built-in character and integer types.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// <cstdint> least- and fast-width types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;
typedef atomic<int_fast8_t>    atomic_int_fast8_t;
typedef atomic<uint_fast8_t>   atomic_uint_fast8_t;
typedef atomic<int_fast16_t>   atomic_int_fast16_t;
typedef atomic<uint_fast16_t>  atomic_uint_fast16_t;
typedef atomic<int_fast32_t>   atomic_int_fast32_t;
typedef atomic<uint_fast32_t>  atomic_uint_fast32_t;
typedef atomic<int_fast64_t>   atomic_int_fast64_t;
typedef atomic<uint_fast64_t>  atomic_uint_fast64_t;

// <cstdint> exact-width, pointer-sized, and max-width types.
typedef atomic< int8_t>   atomic_int8_t;
typedef atomic<uint8_t>   atomic_uint8_t;
typedef atomic< int16_t>  atomic_int16_t;
typedef atomic<uint16_t>  atomic_uint16_t;
typedef atomic< int32_t>  atomic_int32_t;
typedef atomic<uint32_t>  atomic_uint32_t;
typedef atomic< int64_t>  atomic_int64_t;
typedef atomic<uint64_t>  atomic_uint64_t;
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// Initializes an atomic_flag to the cleared (false) state:
//   atomic_flag f = ATOMIC_FLAG_INIT;
#define ATOMIC_FLAG_INIT {false}
// Static initializer for an atomic object:
//   atomic<int> v = ATOMIC_VAR_INIT(5);
#define ATOMIC_VAR_INIT(__v) {__v}
  1619. _LIBCPP_END_NAMESPACE_STD
  1620. #endif // _LIBCPP_ATOMIC