#ifndef BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP
#define BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP

// interlocked_read_win32.hpp
//
// (C) Copyright 2005-8 Anthony Williams
// (C) Copyright 2012 Vicente J. Botet Escriba
// (C) Copyright 2017 Andrey Semashev
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/interlocked.hpp>
#include <boost/thread/detail/config.hpp>

#include <boost/config/abi_prefix.hpp>
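
// This header provides interlocked_read_acquire() and interlocked_write_release():
// a load with acquire semantics and a store with release semantics, for long and
// pointer values. The implementation is selected per compiler and architecture
// by the preprocessor dispatch below.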

// Define compiler barriers
#if defined(__INTEL_COMPILER)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __memory_barrier()
#elif defined(__clang__)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __atomic_signal_fence(__ATOMIC_SEQ_CST)
#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() _ReadWriteBarrier()
#endif

#ifndef BOOST_THREAD_DETAIL_COMPILER_BARRIER
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER()
#endif
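
// Note: on compilers not matched above, the barrier expands to nothing. That is
// safe here because only the MSVC code paths below rely on it; other compilers
// take branches whose intrinsics carry their own ordering guarantees.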

#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))

// Since VS2005, volatile reads have acquire semantics and volatile writes have
// release semantics. VS2012 adds a compiler switch (/volatile:iso) that reverts
// volatile to the standard C++ meaning. On x86, however, the compiler generates
// a single mov instruction for the load/store, which is enough synchronization
// as far as the microarchitecture is concerned. To prevent the compiler from
// reordering code around the load/store, we add compiler barriers.
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=*x;
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            *x=value;
        }
    }
}

#elif defined(_MSC_VER) && _MSC_VER >= 1700 && (defined(_M_ARM) || defined(_M_ARM64))

#include <intrin.h>
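
// On ARM, plain loads and stores are not ordered by the hardware. The acquire
// read is therefore implemented as a relaxed load followed by a data memory
// barrier, and the release write as a barrier followed by a relaxed store.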
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long const res=__iso_volatile_load32((const volatile __int32*)x);
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* const res=
#if defined(_M_ARM64)
                (void*)__iso_volatile_load64((const volatile __int64*)x);
#else
                (void*)__iso_volatile_load32((const volatile __int32*)x);
#endif
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            return res;
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
            __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
            BOOST_THREAD_DETAIL_COMPILER_BARRIER();
#if defined(_M_ARM64)
            __iso_volatile_store64((volatile __int64*)x, (__int64)value);
#else
            __iso_volatile_store32((volatile __int32*)x, (__int32)value);
#endif
        }
    }
}

#elif defined(__GNUC__) && (((__GNUC__ * 100 + __GNUC_MINOR__) >= 407) || (defined(__clang__) && (__clang_major__ * 100 + __clang_minor__) >= 302))
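
// GCC 4.7+ and compatible Clang versions provide the __atomic builtins, which
// map directly onto acquire loads and release stores; no hand-rolled barriers
// are needed on this path.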
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            return __atomic_load_n((long*)x, __ATOMIC_ACQUIRE);
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            return __atomic_load_n((void**)x, __ATOMIC_ACQUIRE);
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            __atomic_store_n((long*)x, value, __ATOMIC_RELEASE);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            __atomic_store_n((void**)x, value, __ATOMIC_RELEASE);
        }
    }
}

#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
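
// Older GCC on x86: the hardware already gives loads acquire semantics and
// stores release semantics, so a plain mov suffices; the "memory" clobber on
// each asm statement serves as the compiler barrier.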
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            long res;
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
            return res;
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            void* res;
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=r" (res) : "m" (*x) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
#endif
            return res;
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
#if defined(__x86_64__)
            __asm__ __volatile__ ("movq %1, %0" : "=m" (*x) : "r" (value) : "memory");
#else
            __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
#endif
        }
    }
}

#else
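
// Generic fallback: emulate the acquire read with a no-op compare-exchange and
// the release write with an interlocked exchange. Both are full barriers, which
// is stronger (and slower) than strictly required, but portable.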
namespace boost
{
    namespace detail
    {
        inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE((long*)x,0,0);
        }
        inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
        {
            return BOOST_INTERLOCKED_COMPARE_EXCHANGE_POINTER((void**)x,0,0);
        }
        inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE((long*)x,value);
        }
        inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
        {
            BOOST_INTERLOCKED_EXCHANGE_POINTER((void**)x,value);
        }
    }
}

#endif
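
// A minimal usage sketch (illustrative only, not part of this header): the
// message-passing pattern these helpers exist for. `data` and `ready` are
// hypothetical variables.
//
//   static int data;               // payload, written before the flag
//   static long volatile ready;    // synchronization flag, initially 0
//
//   // producer thread:
//   data = 42;
//   boost::detail::interlocked_write_release(&ready, 1);
//
//   // consumer thread:
//   if (boost::detail::interlocked_read_acquire(&ready) == 1)
//   {
//       assert(data == 42);        // release/acquire pairing makes this hold
//   }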

#include <boost/config/abi_suffix.hpp>

#endif