/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2015 Andrey Semashev
 */
/*!
 * \file   atomic/detail/extra_ops_gcc_x86.hpp
 *
 * This header contains implementation of the extra atomic operations for x86.
 */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <boost/cstdint.hpp>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/storage_traits.hpp>
  20. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  21. #include <boost/atomic/detail/extra_ops_generic.hpp>
  22. #include <boost/atomic/detail/header.hpp>
  23. #ifdef BOOST_HAS_PRAGMA_ONCE
  24. #pragma once
  25. #endif
  26. namespace boost {
  27. namespace atomics {
  28. namespace detail {
//! Extra atomic operations for 8-bit storage, implemented with gcc-style x86 inline assembly.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // 4-byte scratch type; the asm below only ever touches its low byte via %b2.
    // NOTE(review): presumably a full 32-bit register is requested so the compiler
    // is free in its register allocation for the temporary -- confirm.
    typedef typename storage_traits< 4u >::type temp_storage_type;

// CAS loop applying a unary instruction (e.g. "negb") to the storage byte.
// `original` must hold the current storage value on entry; "+a" pins it to
// al/eax, as required by cmpxchg. On a failed cmpxchgb, eax is reloaded with
// the fresh storage value and the loop retries. Afterwards `original` holds
// the previous value and the low byte of `result` holds the updated value.
// Arithmetic instructions modify EFLAGS, hence the CC clobber.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzbl %[orig], %2\n\t"\
        op " %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically replaces the value with its two's complement negation; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return original;
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return original;
    }

    //! Atomically negates the value; returns the new (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the new (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop applying a binary instruction (e.g. "andb") with an extra argument.
// The scratch register is loaded with the (widened) argument and combined with
// %%al, which holds the current value by virtue of the "+a" constraint on
// `original`. Loop structure is the same as the unary macro above.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%al, %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically ANDs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates the value; returns \c true if the result is non-zero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    //! Atomically complements the value; returns \c true if the result is non-zero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomically adds \c v to the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // Use the shorter inc instruction when the addend is a compile-time constant 1
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically subtracts \c v from the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // Use the shorter dec instruction when the subtrahend is a compile-time constant 1
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically negates the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ANDs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ORs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically XORs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically complements the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        // Note: the x86 "not" instruction does not modify EFLAGS, so no CC clobber here
        __asm__ __volatile__
        (
            "lock; notb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    //! Atomically adds \c v; returns \c true if the resulting value is non-zero (tested via ZF).
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // Compiler supports asm flag outputs: read ZF directly via "=@ccnz"
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        // Fallback: materialize ZF into a byte register with setnz
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically subtracts \c v; returns \c true if the resulting value is non-zero (tested via ZF).
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // Compiler supports asm flag outputs: read ZF directly via "=@ccnz"
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        // Fallback: materialize ZF into a byte register with setnz
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically ANDs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically ORs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically XORs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
//! Extra atomic operations for 16-bit storage, implemented with gcc-style x86 inline assembly.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // 4-byte scratch type; the asm below only ever touches its low word via %w2.
    // NOTE(review): presumably a full 32-bit register is requested so the compiler
    // is free in its register allocation for the temporary -- confirm.
    typedef typename storage_traits< 4u >::type temp_storage_type;

// CAS loop applying a unary instruction (e.g. "negw") to the storage word.
// `original` must hold the current storage value on entry; "+a" pins it to
// ax/eax, as required by cmpxchg. On a failed cmpxchgw, eax is reloaded with
// the fresh storage value and the loop retries. Afterwards `original` holds
// the previous value and the low word of `result` holds the updated value.
// Arithmetic instructions modify EFLAGS, hence the CC clobber.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzwl %[orig], %2\n\t"\
        op " %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically replaces the value with its two's complement negation; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return original;
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return original;
    }

    //! Atomically negates the value; returns the new (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the new (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop applying a binary instruction (e.g. "andw") with an extra argument.
// The scratch register is loaded with the (widened) argument and combined with
// %%ax, which holds the current value by virtue of the "+a" constraint on
// `original`. Loop structure is the same as the unary macro above.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%ax, %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically ANDs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs the value with \c v; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates the value; returns \c true if the result is non-zero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    //! Atomically complements the value; returns \c true if the result is non-zero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomically adds \c v to the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // Use the shorter inc instruction when the addend is a compile-time constant 1
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically subtracts \c v from the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // Use the shorter dec instruction when the subtrahend is a compile-time constant 1
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically negates the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ANDs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ORs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically XORs the value with \c v; the result is discarded.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically complements the value; the result is discarded.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        // Note: the x86 "not" instruction does not modify EFLAGS, so no CC clobber here
        __asm__ __volatile__
        (
            "lock; notw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    //! Atomically adds \c v; returns \c true if the resulting value is non-zero (tested via ZF).
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // Compiler supports asm flag outputs: read ZF directly via "=@ccnz"
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        // Fallback: materialize ZF into a byte register with setnz
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically subtracts \c v; returns \c true if the resulting value is non-zero (tested via ZF).
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // Compiler supports asm flag outputs: read ZF directly via "=@ccnz"
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        // Fallback: materialize ZF into a byte register with setnz
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically ANDs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically ORs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically XORs in \c v; returns \c true if the resulting value is non-zero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically sets bit \c bit_number; returns the previous state of the bit (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // bts puts the original bit value in CF; read it via "=@ccc"
        __asm__ __volatile__
        (
            "lock; btsw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        // Fallback: materialize CF into a byte register with setc
        __asm__ __volatile__
        (
            "lock; btsw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically clears bit \c bit_number; returns the previous state of the bit (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // btr puts the original bit value in CF; read it via "=@ccc"
        __asm__ __volatile__
        (
            "lock; btrw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        // Fallback: materialize CF into a byte register with setc
        __asm__ __volatile__
        (
            "lock; btrw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically flips bit \c bit_number; returns the previous state of the bit (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // btc puts the original bit value in CF; read it via "=@ccc"
        __asm__ __volatile__
        (
            "lock; btcw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        // Fallback: materialize CF into a byte register with setc
        __asm__ __volatile__
        (
            "lock; btcw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
// Specialization of the extra operations for 4-byte storage using x86
// inline assembly (the trailing "true" parameter selects this asm path).
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

// CAS loop applying a unary instruction (negl/notl) to the 32-bit storage.
// "original" lives in eax ("+a") so it serves as the cmpxchg comparand; on a
// failed cmpxchg the instruction reloads eax with the current memory value,
// so the loop needs no explicit re-read before retrying. "res" is an
// early-clobber scratch register holding the transformed value.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
// Atomically negates storage; returns the value observed before the change.
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
    return original;
}

// Atomically bitwise-complements storage; returns the pre-modification value.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
    return original;
}

// Atomically negates storage; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
    return result;
}

// Atomically bitwise-complements storage; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
    return result;
}
#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop combining the previous storage value (in eax, the cmpxchg
// comparand) with "argument" via a binary instruction (andl/orl/xorl).
// The argument is first copied into the scratch "res", then op folds eax
// into it; cmpxchg publishes the combined value and, on failure, reloads
// eax with the current value so the loop can retry directly.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%eax, %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "ir" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
// Atomically computes storage &= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
    return static_cast< storage_type >(result);
}

// Atomically computes storage |= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
    return static_cast< storage_type >(result);
}

// Atomically computes storage ^= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
    return static_cast< storage_type >(result);
}

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
// Atomically negates storage; returns true iff the resulting value is non-zero.
static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    return !!negate(storage, order);
}

// Atomically complements storage; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    return !!bitwise_complement(storage, order);
}
// Atomically adds v to storage, discarding the result. When v is a
// compile-time constant equal to 1, uses inc, which avoids encoding an
// immediate operand. The memory_order argument is unused: the lock prefix
// already enforces a full barrier on x86.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
}

// Atomically subtracts v from storage, discarding the result; uses dec for
// a compile-time constant v == 1, analogously to opaque_add.
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
}
// Opaque (result-discarding) atomic modifications implemented as single
// lock-prefixed read-modify-write instructions. The memory_order argument is
// unused: the lock prefix already enforces a full barrier on x86.

// Atomically negates storage.
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; negl %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage &= v.
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; andl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage |= v.
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; orl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage ^= v.
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; xorl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically bitwise-complements storage. No "cc" clobber: the x86 "not"
// instruction does not modify any flags.
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; notl %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : "memory"
    );
}
// Atomically adds v to storage and returns true iff the result is non-zero.
// A compile-time v == 1 uses inc, which avoids encoding an immediate. The
// memory_order argument is unused: the lock prefix is a full barrier on x86.
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccnz" stores (ZF == 0) directly into res — no setcc, no "cc" clobber.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incl %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
    }
#else
    // Fallback without flag outputs: capture ZF via an explicit setnz.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incl %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    return res;
}

// Atomically subtracts v from storage and returns true iff the result is
// non-zero; uses dec for a compile-time v == 1, analogously to add_and_test.
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decl %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
    }
#else
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decl %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    return res;
}
// Atomic bitwise ops returning whether the result is non-zero (ZF == 0).
// The memory_order argument is unused: the lock prefix is a full barrier
// on x86.

// Atomically computes storage &= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccnz" stores (ZF == 0) directly into res.
    __asm__ __volatile__
    (
        "lock; andl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "ir" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; andl %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically computes storage |= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; orl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "ir" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; orl %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically computes storage ^= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; xorl %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "ir" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; xorl %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "ir" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}
// 32-bit atomic bit-test operations. Each returns the previous value of the
// tested bit, which bts/btr/btc leave in CF. The memory_order argument is
// unused: the lock prefix already enforces a full barrier on x86. "Kr" lets
// the bit number be an 8-bit immediate or any general register.

// Atomically sets the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccc" materializes the carry flag directly into res.
    __asm__ __volatile__
    (
        "lock; btsl %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btsl %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically clears the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btrl %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btrl %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically flips the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btcl %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btcl %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint32_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}
};
#if defined(__x86_64__)
// Specialization of the extra operations for 8-byte storage; only available
// on x86-64, where 64-bit lock-prefixed instructions exist.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

// CAS loop applying a unary instruction (negq/notq) to the 64-bit storage.
// "original" lives in rax ("+a") as the cmpxchg comparand; a failed cmpxchg
// reloads rax with the current memory value, so no explicit re-read is
// needed before retrying. "res" is an early-clobber scratch register.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
// Atomically negates storage; returns the value observed before the change.
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
    return original;
}

// Atomically bitwise-complements storage; returns the pre-modification value.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
    return original;
}

// Atomically negates storage; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
    return result;
}

// Atomically bitwise-complements storage; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
    return result;
}
#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop combining the previous storage value (in rax, the cmpxchg
// comparand) with "argument" via a binary instruction (andq/orq/xorq).
// Unlike the 32-bit variant, "arg" is register-only ("r"): these 64-bit
// ALU instructions cannot take a full 64-bit immediate operand.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%rax, %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
// Atomically computes storage &= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result);
    return static_cast< storage_type >(result);
}

// Atomically computes storage |= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result);
    return static_cast< storage_type >(result);
}

// Atomically computes storage ^= v; returns the resulting (new) value.
static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result);
    return static_cast< storage_type >(result);
}

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
// Atomically negates storage; returns true iff the resulting value is non-zero.
static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    return !!negate(storage, order);
}

// Atomically complements storage; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    return !!bitwise_complement(storage, order);
}
// Atomically adds v to storage, discarding the result. A compile-time v == 1
// uses inc, which avoids encoding an immediate. "er" restricts the immediate
// to a sign-extended 32-bit constant, which is all addq accepts. The
// memory_order argument is unused: the lock prefix is a full barrier on x86.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
}

// Atomically subtracts v from storage, discarding the result; uses dec for
// a compile-time constant v == 1, analogously to opaque_add.
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
}
// Opaque (result-discarding) 64-bit atomic modifications, each a single
// lock-prefixed read-modify-write instruction. The memory_order argument is
// unused: the lock prefix is a full barrier on x86.

// Atomically negates storage.
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; negq %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage &= v.
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage |= v.
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically computes storage ^= v.
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
}

// Atomically bitwise-complements storage. No "cc" clobber: the x86 "not"
// instruction does not modify any flags.
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
{
    __asm__ __volatile__
    (
        "lock; notq %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : "memory"
    );
}
// Atomically adds v to storage and returns true iff the result is non-zero.
// A compile-time v == 1 uses inc; "er" limits immediates to sign-extended
// 32-bit constants, which is all addq accepts. The memory_order argument is
// unused: the lock prefix is a full barrier on x86.
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccnz" stores (ZF == 0) directly into res — no setcc, no "cc" clobber.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
    }
#else
    // Fallback without flag outputs: capture ZF via an explicit setnz.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    return res;
}

// Atomically subtracts v from storage and returns true iff the result is
// non-zero; uses dec for a compile-time v == 1, analogously to add_and_test.
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
    }
#else
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    return res;
}
// 64-bit atomic bitwise ops returning whether the result is non-zero
// (ZF == 0). The memory_order argument is unused: the lock prefix is a full
// barrier on x86.

// Atomically computes storage &= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccnz" stores (ZF == 0) directly into res.
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically computes storage |= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically computes storage ^= v; returns true iff the result is non-zero.
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}
// 64-bit atomic bit-test operations. Each returns the previous value of the
// tested bit, which bts/btr/btc leave in CF. The memory_order argument is
// unused: the lock prefix already enforces a full barrier on x86. "Kr" lets
// the bit number be an 8-bit immediate or any general register.

// Atomically sets the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // "=@ccc" materializes the carry flag directly into res.
    __asm__ __volatile__
    (
        "lock; btsq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btsq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically clears the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btrq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btrq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}

// Atomically flips the bit at bit_number; returns its previous value.
static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btcq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btcq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    return res;
}
};
  1659. #endif // defined(__x86_64__)
  1660. } // namespace detail
  1661. } // namespace atomics
  1662. } // namespace boost
  1663. #include <boost/atomic/detail/footer.hpp>
  1664. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_