extra_ops_gcc_aarch64.hpp 46 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330
  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2020 Andrey Semashev
  7. */
  8. /*!
  9. * \file atomic/detail/extra_ops_gcc_aarch64.hpp
  10. *
  11. * This header contains implementation of the extra atomic operations for AArch64.
  12. */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <boost/cstdint.hpp>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/platform.hpp>
  20. #include <boost/atomic/detail/storage_traits.hpp>
  21. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  22. #include <boost/atomic/detail/extra_ops_generic.hpp>
  23. #include <boost/atomic/detail/ops_gcc_aarch64_common.hpp>
  24. #include <boost/atomic/detail/capabilities.hpp>
  25. #include <boost/atomic/detail/header.hpp>
  26. #ifdef BOOST_HAS_PRAGMA_ONCE
  27. #pragma once
  28. #endif
  29. namespace boost {
  30. namespace atomics {
  31. namespace detail {
  32. template< typename Base >
  33. struct extra_operations_gcc_aarch64_common :
  34. public Base
  35. {
  36. typedef Base base_type;
  37. typedef typename base_type::storage_type storage_type;
  38. // Note: For opaque operations prefer operations returning the resulting values instead of the original values
  39. // as these operations require less registers. That is unless LSE is available, in which case
  40. // it is better to use the dedicated atomic instructions. The LSE check is done in the base_type,
  41. // where needed (e.g. for 128-bit operations there are no LSE instructions).
  42. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  43. {
  44. base_type::negate(storage, order);
  45. }
  46. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  47. {
  48. base_type::bitwise_complement(storage, order);
  49. }
  50. static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  51. {
  52. base_type::add(storage, v, order);
  53. }
  54. static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  55. {
  56. base_type::sub(storage, v, order);
  57. }
  58. static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  59. {
  60. base_type::bitwise_and(storage, v, order);
  61. }
  62. static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  63. {
  64. base_type::bitwise_or(storage, v, order);
  65. }
  66. static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  67. {
  68. base_type::bitwise_xor(storage, v, order);
  69. }
  70. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  71. {
  72. return !!base_type::negate(storage, order);
  73. }
  74. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  75. {
  76. return !!base_type::add(storage, v, order);
  77. }
  78. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  79. {
  80. return !!base_type::sub(storage, v, order);
  81. }
  82. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  83. {
  84. return !!base_type::bitwise_and(storage, v, order);
  85. }
  86. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  87. {
  88. return !!base_type::bitwise_or(storage, v, order);
  89. }
  90. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  91. {
  92. return !!base_type::bitwise_xor(storage, v, order);
  93. }
  94. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  95. {
  96. return !!base_type::bitwise_complement(storage, order);
  97. }
  98. };
//! Primary template; specialized below for each supported storage size.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_aarch64;
//! Extra operations for 8-bit storage, implemented as LL/SC retry loops using
//! the byte-sized exclusive access instructions (ld*xrb/st*xrb).
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    // Atomically negates the value in storage and returns the value observed
    // *before* the negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp; // exclusive store status: non-zero means the store-exclusive failed and the loop retries
        // BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order) expands the macro below with the
        // ld/st instruction suffixes matching the requested memory order (defined in
        // ops_gcc_aarch64_common.hpp).
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically negates the value in storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // The operations below are only compiled when LSE atomic instructions are not
    // available; otherwise the implementations inherited from base_type are used.

    // Atomically adds v and returns the resulting (new) value. The "I" constraint
    // lets the compiler encode v as an arithmetic immediate where possible.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically subtracts v and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ANDs v into storage and returns the resulting (new) value. The "K"
    // constraint lets the compiler encode v as a logical (bitmask) immediate.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically XORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically complements (bitwise NOT) the value and returns the value
    // observed *before* the modification.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically complements (bitwise NOT) the value and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
//! Dispatch hook: 8-bit extra operations = AArch64 implementation plus the
//! common opaque/test wrappers.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 1u, Signed > >
{
};
//! Extra operations for 16-bit storage, implemented as LL/SC retry loops using
//! the halfword-sized exclusive access instructions (ld*xrh/st*xrh).
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    // Atomically negates the value in storage and returns the value observed
    // *before* the negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp; // exclusive store status: non-zero means the store-exclusive failed and the loop retries
        // BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order) expands the macro below with the
        // ld/st instruction suffixes matching the requested memory order.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically negates the value in storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // The operations below are only compiled when LSE atomic instructions are not
    // available; otherwise the implementations inherited from base_type are used.

    // Atomically adds v and returns the resulting (new) value. The "I" constraint
    // lets the compiler encode v as an arithmetic immediate where possible.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically subtracts v and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ANDs v into storage and returns the resulting (new) value. The "K"
    // constraint lets the compiler encode v as a logical (bitmask) immediate.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically XORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically complements (bitwise NOT) the value and returns the value
    // observed *before* the modification.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically complements (bitwise NOT) the value and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
//! Dispatch hook: 16-bit extra operations = AArch64 implementation plus the
//! common opaque/test wrappers.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 2u, Signed > >
{
};
//! Extra operations for 32-bit storage, implemented as LL/SC retry loops using
//! the word-sized exclusive access instructions (ld*xr/st*xr on %w registers).
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 4u, Signed > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    // Atomically negates the value in storage and returns the value observed
    // *before* the negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp; // exclusive store status: non-zero means the store-exclusive failed and the loop retries
        // BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order) expands the macro below with the
        // ld/st instruction suffixes matching the requested memory order.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically negates the value in storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // The operations below are only compiled when LSE atomic instructions are not
    // available; otherwise the implementations inherited from base_type are used.

    // Atomically adds v and returns the resulting (new) value. The "I" constraint
    // lets the compiler encode v as an arithmetic immediate where possible.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically subtracts v and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ANDs v into storage and returns the resulting (new) value. The "K"
    // constraint lets the compiler encode v as a logical (bitmask) immediate.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically ORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically XORs v into storage and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }

    // Atomically complements (bitwise NOT) the value and returns the value
    // observed *before* the modification.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return original;
    }

    // Atomically complements (bitwise NOT) the value and returns the resulting (new) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
//! Dispatch hook: 32-bit extra operations = AArch64 implementation plus the
//! common opaque/test wrappers.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 4u, Signed > >
{
};
//! Extra atomic operations for 8-byte storage, implemented with AArch64
//! 64-bit exclusive load/store (ld*xr/st*xr) retry loops. Each operation
//! defines BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN and expands it through
//! BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order), which presumably selects
//! the acquire/release instruction suffixes (ld_mo/st_mo) for the requested
//! memory order — the dispatch macro is defined earlier in this header.
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically negates the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

        // tmp receives the store-exclusive status; cbnz retries on failure.
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "neg %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    //! Atomically negates the value; returns the new (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "neg %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // The following are only needed when LSE atomic instructions are not
    // available; with LSE the base class presumably provides them instead.

    //! Atomically adds v; returns the new value. "Ir" allows v to be an
    //! arithmetic immediate or a register.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "add %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    //! Atomically subtracts v; returns the new value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "sub %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    //! Atomically ANDs in v; returns the new value. "Lr" allows v to be a
    //! logical-instruction immediate or a register.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "and %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    //! Atomically ORs in v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "orr %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    //! Atomically XORs in v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "eor %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "mvn %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    //! Atomically replaces the value with its bitwise complement; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "mvn %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
// Public extra_operations specialization for 8-byte storage: layers the
// common AArch64 helper wrapper (defined earlier in this header) over the
// asm-based implementation above.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 8u, Signed > >
{
};
//! Extra atomic operations for 16-byte storage, implemented with AArch64
//! paired exclusive load/store (ld*xp/st*xp) retry loops over two 64-bit
//! halves held in a storage_union. The _ASM_ARG_LO/_ASM_ARG_HI macros
//! (defined earlier in this header) presumably select which as_uint64 index
//! holds the low/high half for the target endianness, so the carry/borrow
//! chains (adds/adc, subs/sbc) propagate from the low half to the high half.
//! Flag-setting sequences list the condition-code clobber via
//! BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA; pure bitwise ops do not.
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 16u, Signed > :
    public extra_operations_generic< Base, 16u, Signed >
{
    typedef extra_operations_generic< Base, 16u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename base_type::storage_union storage_union;

    //! Atomically negates the 128-bit value; returns the previous value.
    //! Negation is two's complement: complement both halves, then add 1
    //! with carry propagation into the high half.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    //! Atomically negates the 128-bit value; returns the new (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically adds v (128-bit add with carry across halves); returns the new value.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically subtracts v (128-bit subtract with borrow across halves); returns the new value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "subs %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "sbc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically ANDs in v (halves are independent, no carry); returns the new value.
    //! "Lr" allows each half to be a logical-instruction immediate or a register.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "and %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "and %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically ORs in v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "orr %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "orr %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically XORs in v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "eor %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "eor %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    //! Atomically replaces the value with its bitwise complement; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }
};
// Public extra_operations specialization for 16-byte storage: layers the
// common AArch64 helper wrapper (defined earlier in this header) over the
// asm-based implementation above.
template< typename Base, bool Signed >
struct extra_operations< Base, 16u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 16u, Signed > >
{
};
  1101. } // namespace detail
  1102. } // namespace atomics
  1103. } // namespace boost
  1104. #include <boost/atomic/detail/footer.hpp>
  1105. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_