/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2020 Andrey Semashev
 */
/*!
 * \file atomic/detail/extra_ops_gcc_aarch32.hpp
 *
 * This header contains implementation of the extra atomic operations for AArch32.
 */

#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH32_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH32_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/platform.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/detail/ops_gcc_aarch32_common.hpp>
#include <boost/atomic/detail/capabilities.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

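// The size-specific operations below are LDREX/STREX ("load/store exclusive") retry loops.
// Each loop body is defined in a temporary BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo) macro,
// which BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order), defined among the AArch32 helpers included
// above, expands with the instruction infixes matching the requested memory_order ("r" for the
// relaxed ldrex*/strex* forms, "a"/"l" for the acquiring/releasing ldaex*/stlex* forms). The
// store-exclusive writes 0 to its status register on success, so "teq %[tmp], #0; bne 1b" repeats
// the loop until the update is committed atomically.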
template< typename Base >
struct extra_operations_gcc_aarch32_common :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;

    // Note: For opaque operations prefer operations returning the resulting values instead of the original values
    //       as these operations require fewer registers.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::negate(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::add(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::sub(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_and(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_or(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_xor(storage, v, order);
    }

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::negate(storage, order);
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::add(storage, v, order);
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::sub(storage, v, order);
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_and(storage, v, order);
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_or(storage, v, order);
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_xor(storage, v, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_complement(storage, order);
    }
};

template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_aarch32;

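// 8-bit operations: the exclusive accesses use the byte forms (ldrexb/ldaexb, strexb/stlexb).
// Values are computed in full 32-bit registers; the byte store-exclusive only commits the low 8 bits.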
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch32< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[original], %[storage]\n\t"\
            "rsb %[result], %[original], #0\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "rsb %[result], #0\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "add %[result], %[value]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "sub %[result], %[value]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "and %[result], %[value]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "orr %[result], %[value]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "eor %[result], %[value]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[original], %[storage]\n\t"\
            "mvn %[result], %[original]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exb %[result], %[storage]\n\t"\
            "mvn %[result], %[result]\n\t"\
            "st" st_mo "exb %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_aarch32_common< extra_operations_gcc_aarch32< Base, 1u, Signed > >
{
};

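// 16-bit operations: same code shape as the 8-bit versions, but using the halfword exclusive
// instructions (ldrexh/ldaexh, strexh/stlexh).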
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch32< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[original], %[storage]\n\t"\
            "rsb %[result], %[original], #0\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "rsb %[result], #0\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "add %[result], %[value]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "sub %[result], %[value]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "and %[result], %[value]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "orr %[result], %[value]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "eor %[result], %[value]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[original], %[storage]\n\t"\
            "mvn %[result], %[original]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exh %[result], %[storage]\n\t"\
            "mvn %[result], %[result]\n\t"\
            "st" st_mo "exh %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_aarch32_common< extra_operations_gcc_aarch32< Base, 2u, Signed > >
{
};

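// 32-bit operations: these use the word-sized exclusive instructions (ldrex/ldaex, strex/stlex) directly.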
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch32< Base, 4u, Signed > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[original], %[storage]\n\t"\
            "rsb %[result], %[original], #0\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "rsb %[result], #0\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "add %[result], %[value]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "sub %[result], %[value]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "and %[result], %[value]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "orr %[result], %[value]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "eor %[result], %[value]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : [value] "Ir" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[original], %[storage]\n\t"\
            "mvn %[result], %[original]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [original] "=&r" (original), [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "ex %[result], %[storage]\n\t"\
            "mvn %[result], %[result]\n\t"\
            "st" st_mo "ex %[tmp], %[result], %[storage]\n\t"\
            "teq %[tmp], #0\n\t"\
            "bne 1b\n\t"\
            : [result] "=&r" (result), [tmp] "=&r" (tmp), [storage] "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_aarch32_common< extra_operations_gcc_aarch32< Base, 4u, Signed > >
{
};

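// 64-bit operations: implemented with the doubleword exclusives (ldrexd/ldaexd, strexd/stlexd), which
// transfer a register pair. In these templates %0 names the lower-numbered register of a 64-bit operand
// and %H0 its sibling, while BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO/HI (helper macros from the AArch32
// backend) pick whichever half holds the numerically low/high 32 bits, which depends on target endianness.
// Negation is done as a two's complement: bitwise-invert both halves, then add 1 with carry propagation
// (adds #1 / adc #0).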
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch32< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "mvn %3, %0\n\t"\
            "mvn %H3, %H0\n\t"\
            "adds " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(3) ", #1\n\t"\
            "adc " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(3) ", #0\n\t"\
            "st" st_mo "exd %1, %3, %H3, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (original), "=&r" (tmp), "+Q" (storage), "=&r" (result)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "mvn %0, %0\n\t"\
            "mvn %H0, %H0\n\t"\
            "adds " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(0) ", #1\n\t"\
            "adc " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(0) ", #0\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "adds " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(0) ", " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(3) "\n\t"\
            "adc " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(0) ", " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(3) "\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : "r" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "subs " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(0) ", " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_LO(3) "\n\t"\
            "sbc " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(0) ", " BOOST_ATOMIC_DETAIL_AARCH32_ASM_ARG_HI(3) "\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : "r" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "and %0, %3\n\t"\
            "and %H0, %H3\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : "r" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "orr %0, %3\n\t"\
            "orr %H0, %H3\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : "r" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "eor %0, %3\n\t"\
            "eor %H0, %H3\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : "r" (v)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "mvn %3, %0\n\t"\
            "mvn %H3, %H0\n\t"\
            "st" st_mo "exd %1, %3, %H3, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (original), "=&r" (tmp), "+Q" (storage), "=&r" (result)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "exd %0, %H0, %2\n\t"\
            "mvn %0, %0\n\t"\
            "mvn %H0, %H0\n\t"\
            "st" st_mo "exd %1, %0, %H0, %2\n\t"\
            "teq %1, #0\n\t"\
            "bne 1b\n\t"\
            : "=&r" (result), "=&r" (tmp), "+Q" (storage)\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH32_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH32_MO_INSN
        return result;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_aarch32_common< extra_operations_gcc_aarch32< Base, 8u, Signed > >
{
};

} // namespace detail
} // namespace atomics
} // namespace boost

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH32_HPP_INCLUDED_