// (pagination artifacts from the source viewer removed)
/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2017 - 2018 Andrey Semashev
 */
/*!
 * \file   atomic/detail/extra_ops_gcc_arm.hpp
 *
 * This header contains implementation of the extra atomic operations for ARM.
 */

#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/platform.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/detail/ops_gcc_arm_common.hpp>
#include <boost/atomic/detail/gcc_arm_asm_common.hpp>
#include <boost/atomic/detail/capabilities.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {
  33. template< typename Base >
  34. struct extra_operations_gcc_arm_common :
  35. public Base
  36. {
  37. typedef Base base_type;
  38. typedef typename base_type::storage_type storage_type;
  39. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  40. {
  41. base_type::fetch_negate(storage, order);
  42. }
  43. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  44. {
  45. base_type::fetch_complement(storage, order);
  46. }
  47. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  48. {
  49. return !!base_type::negate(storage, order);
  50. }
  51. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  52. {
  53. return !!base_type::add(storage, v, order);
  54. }
  55. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  56. {
  57. return !!base_type::sub(storage, v, order);
  58. }
  59. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  60. {
  61. return !!base_type::bitwise_and(storage, v, order);
  62. }
  63. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  64. {
  65. return !!base_type::bitwise_or(storage, v, order);
  66. }
  67. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  68. {
  69. return !!base_type::bitwise_xor(storage, v, order);
  70. }
  71. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  72. {
  73. return !!base_type::bitwise_complement(storage, order);
  74. }
  75. };
// Primary template; specialized below for the storage sizes supported by the
// CPU's exclusive load/store (ldrex*/strex*) instructions.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_arm;
#if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
//! Extra operations for 8-bit storage, implemented as ldrexb/strexb
//! exclusive-access retry loops. Operations not defined here are inherited
//! from the generic (CAS-based) implementation.
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // ldrexb zero-extends the loaded byte into a 32-bit register, so the
    // intermediate arithmetic is performed in a 4-byte type and the result
    // is truncated back to storage_type on return.
    typedef typename storage_traits< 4u >::type extended_storage_type;

    //! Atomically negates the value; returns the value observed BEFORE the operation
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically negates the value; returns the RESULTING (negated) value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the value observed BEFORE the operation
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically complements the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
// Hooks the ARM ldrexb/strexb implementation (plus the common wrapper layer)
// into the extra_operations dispatch for 8-bit storage.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 1u, Signed > >
{
};
#endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)

#if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
//! Extra operations for 16-bit storage, implemented as ldrexh/strexh
//! exclusive-access retry loops. Operations not defined here are inherited
//! from the generic (CAS-based) implementation.
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // ldrexh zero-extends the loaded halfword into a 32-bit register, so the
    // intermediate arithmetic is performed in a 4-byte type and the result
    // is truncated back to storage_type on return.
    typedef typename storage_traits< 4u >::type extended_storage_type;

    //! Atomically negates the value; returns the value observed BEFORE the operation
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically negates the value; returns the RESULTING (negated) value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \c v into the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the value observed BEFORE the operation
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically complements the value; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
// Hooks the ARM ldrexh/strexh implementation (plus the common wrapper layer)
// into the extra_operations dispatch for 16-bit storage.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 2u, Signed > >
{
};
#endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
//! Extra operations for 32-bit storage, implemented as ldrex/strex
//! exclusive-access retry loops; no widening is needed at this size.
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 4u, Signed > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    //! Atomically negates the value; returns the value observed BEFORE the operation
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }
    //! Atomically negates the value; returns the RESULTING (negated) value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    //! Atomically adds \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    //! Atomically subtracts \c v; returns the RESULTING value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically performs storage &= v and returns the NEW value.
    // LDREX/STREX retry loop; "and" computes the bitwise AND of the loaded
    // value with the operand before the store-exclusive attempt.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t"        // original = *(&storage)
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strex %[tmp], %[result], %[storage]\n\t"  // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t"                       // flags = tmp==0
            "bne 1b\n\t"                               // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result),     // %1
              [tmp] "=&l" (tmp),           // %2
              [storage] "+Q" (storage)     // %3
            : [value] "Ir" (v)             // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically performs storage |= v and returns the NEW value.
    // LDREX/STREX retry loop; "orr" is the ARM mnemonic for bitwise OR.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t"        // original = *(&storage)
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strex %[tmp], %[result], %[storage]\n\t"  // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t"                       // flags = tmp==0
            "bne 1b\n\t"                               // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result),     // %1
              [tmp] "=&l" (tmp),           // %2
              [storage] "+Q" (storage)     // %3
            : [value] "Ir" (v)             // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically performs storage ^= v and returns the NEW value.
    // LDREX/STREX retry loop; "eor" is the ARM mnemonic for bitwise XOR.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t"        // original = *(&storage)
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strex %[tmp], %[result], %[storage]\n\t"  // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t"                       // flags = tmp==0
            "bne 1b\n\t"                               // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result),     // %1
              [tmp] "=&l" (tmp),           // %2
              [storage] "+Q" (storage)     // %3
            : [value] "Ir" (v)             // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically performs storage = ~storage and returns the PREVIOUS value
    // (fetch semantics — note "return original", unlike bitwise_complement()
    // which returns the new value). The loop body is otherwise identical:
    // "mvn" writes the bitwise NOT of the loaded value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t"       // original = *(&storage)
            "mvn %[result], %[original]\n\t"          // result = NOT original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t"                      // flags = tmp==0
            "bne 1b\n\t"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result),     // %1
              [tmp] "=&l" (tmp),           // %2
              [storage] "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }
    // Atomically performs storage = ~storage and returns the NEW value
    // (contrast with fetch_complement(), which returns the previous value).
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t"       // original = *(&storage)
            "mvn %[result], %[original]\n\t"          // result = NOT original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t"                      // flags = tmp==0
            "bne 1b\n\t"                              // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result),     // %1
              [tmp] "=&l" (tmp),           // %2
              [storage] "+Q" (storage)     // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
  789. };
// Dispatch specialization: for 4-byte storage on this target, route
// extra_operations to the LDREX/STREX-based implementation above, wrapped in
// the common layer (which presumably derives the *_and_test operations —
// defined elsewhere in this file).
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 4u, Signed > >
{
};
  795. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
// 64-bit extra operations, compiled only when the target provides the
// doubleword exclusive pair LDREXD/STREXD (guarded by
// BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD). The loops mirror the 32-bit
// versions above, but each value occupies an even/odd register pair: in the
// asm templates "%N" names one register of operand N and "%HN" its partner
// (GCC ARM operand modifier). Carry-free operations (mvn/and/orr/eor) use
// %N/%HN directly since word order does not matter; carry-chained arithmetic
// (adds/adc, subs/sbc) instead goes through the
// BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO/ARG_HI macros, which select the
// arithmetically low/high word — presumably in an endian-aware way (macro
// definitions are outside this view; confirm there).
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    // Atomically negates storage (two's complement: result = ~original + 1,
    // done as a 64-bit NOT followed by adds/adc of 1 across the pair) and
    // returns the PREVIOUS value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t"         // result = NOT original
            "mvn %H2, %H1\n\t"
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }

    // Atomically negates storage and returns the NEW (negated) value.
    // Same instruction sequence as fetch_negate(); only the return differs.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t"         // result = NOT original
            "mvn %H2, %H1\n\t"
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage += v (64-bit add via adds/adc carry chain)
    // and returns the NEW value. v must live in a register pair ("r").
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original + value
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            : "r" (v)           // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage -= v (64-bit subtract via subs/sbc borrow
    // chain) and returns the NEW value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "subs " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original - value
            "sbc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            : "r" (v)           // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage &= v and returns the NEW value. Bitwise op,
    // so the two words are processed independently (no carry between them).
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "and %2, %1, %4\n\t"     // result = original & value
            "and %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            : "r" (v)           // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage |= v and returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "orr %2, %1, %4\n\t"     // result = original | value
            "orr %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            : "r" (v)           // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage ^= v and returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "eor %2, %1, %4\n\t"     // result = original ^ value
            "eor %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            : "r" (v)           // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically performs storage = ~storage and returns the PREVIOUS value
    // (fetch semantics — note "return original").
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t"         // result = NOT original
            "mvn %H2, %H1\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }

    // Atomically performs storage = ~storage and returns the NEW value
    // (contrast with fetch_complement() above).
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t"         // result = NOT original
            "mvn %H2, %H1\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t"             // flags = tmp==0
            "bne 1b\n\t"                 // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result),   // %2
              "+Q" (storage)    // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
};
// Dispatch specialization: for 8-byte storage, route extra_operations to the
// LDREXD/STREXD-based implementation above, wrapped in the common layer
// (defined elsewhere in this file). Only compiled when the surrounding
// BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD guard is active.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 8u, Signed > >
{
};
  1046. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
  1047. } // namespace detail
  1048. } // namespace atomics
  1049. } // namespace boost
  1050. #include <boost/atomic/detail/footer.hpp>
  1051. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_