interlocked_read.hpp
#ifndef BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP
#define BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP

// interlocked_read_win32.hpp
//
// (C) Copyright 2005-8 Anthony Williams
// (C) Copyright 2012 Vicente J. Botet Escriba
// (C) Copyright 2017 Andrey Semashev
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <boost/detail/interlocked.hpp>
#include <boost/thread/detail/config.hpp>

#include <boost/config/abi_prefix.hpp>

// Define compiler barriers
#if defined(__INTEL_COMPILER)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() __memory_barrier()
#elif defined(_MSC_VER) && !defined(_WIN32_WCE)
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER() _ReadWriteBarrier()
#endif

#ifndef BOOST_THREAD_DETAIL_COMPILER_BARRIER
#define BOOST_THREAD_DETAIL_COMPILER_BARRIER()
#endif
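
// Note: BOOST_THREAD_DETAIL_COMPILER_BARRIER() is a compile-time fence only:
// it prevents the compiler from moving memory accesses across it, but emits
// no CPU instruction. A minimal sketch of the reordering it rules out
// (illustrative only; `data` and `ready` are hypothetical variables):
//
//   data = 42;
//   BOOST_THREAD_DETAIL_COMPILER_BARRIER(); // the compiler may not sink the
//   ready = 1;                              // store to data below this point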

#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))

// Since VS2005, volatile reads always have acquire semantics and volatile
// writes always have release semantics. VS2012 adds a compiler switch that can
// change this behavior back to the standard one. On x86, though, the compiler
// generates a single instruction for the load/store, which is enough
// synchronization as far as the microarchitecture is concerned. To prevent the
// compiler from reordering code around the load/store we add compiler
// barriers.
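
// A note on expected codegen (an assumption about typical MSVC output, not a
// guarantee): interlocked_read_acquire reduces to a single `mov` from memory
// with no fence instruction, because x86 loads already have acquire
// semantics; the barrier only constrains the compiler.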

namespace boost
{
namespace detail
{

inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
{
    long const res=*x;
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    return res;
}
inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
{
    void* const res=*x;
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    return res;
}

inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
{
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    *x=value;
}
inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
{
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    *x=value;
}

} // namespace detail
} // namespace boost

#elif defined(_MSC_VER) && _MSC_VER >= 1700 && (defined(_M_ARM) || defined(_M_ARM64))

#include <intrin.h>
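
// ARM is weakly ordered, so a plain load is not an acquire and a plain store
// is not a release. An explicit DMB ISH barrier is therefore issued: after
// the load for acquire semantics, before the store for release semantics.
// The __iso_volatile_load*/__iso_volatile_store* intrinsics perform the
// access without MSVC's /volatile:ms semantics, and the surrounding compiler
// barriers keep the compiler from reordering code around the DMB.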

namespace boost
{
namespace detail
{

inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
{
    long const res=__iso_volatile_load32((const volatile __int32*)x);
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    return res;
}
inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
{
    void* const res=
#if defined(_M_ARM64)
        (void*)__iso_volatile_load64((const volatile __int64*)x);
#else
        (void*)__iso_volatile_load32((const volatile __int32*)x);
#endif
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    return res;
}

inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
{
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    __iso_volatile_store32((volatile __int32*)x, (__int32)value);
}
inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
{
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
    __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    BOOST_THREAD_DETAIL_COMPILER_BARRIER();
#if defined(_M_ARM64)
    __iso_volatile_store64((volatile __int64*)x, (__int64)value);
#else
    __iso_volatile_store32((volatile __int32*)x, (__int32)value);
#endif
}

} // namespace detail
} // namespace boost

#elif defined(__GNUC__) && (((__GNUC__ * 100 + __GNUC_MINOR__) >= 407) || (defined(__clang__) && (__clang_major__ * 100 + __clang_minor__) >= 302))
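
// GCC 4.7+ and clang 3.2+ provide the __atomic built-ins, which map directly
// onto the target's native acquire loads and release stores, so no
// hand-written barriers are needed on this path.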

namespace boost
{
namespace detail
{

inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
{
    return __atomic_load_n((long*)x, __ATOMIC_ACQUIRE);
}
inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
{
    return __atomic_load_n((void**)x, __ATOMIC_ACQUIRE);
}

inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
{
    __atomic_store_n((long*)x, value, __ATOMIC_RELEASE);
}
inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
{
    __atomic_store_n((void**)x, value, __ATOMIC_RELEASE);
}

} // namespace detail
} // namespace boost

#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
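
// Older GCC on x86/x64: the strongly ordered x86 memory model already gives
// plain loads acquire semantics and plain stores release semantics, so an
// ordinary `mov` suffices. The "memory" clobber on each asm statement acts
// as a compiler barrier, preventing the compiler from reordering other
// memory accesses around the load/store.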

namespace boost
{
namespace detail
{

inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
{
    long res;
    __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
    return res;
}
inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
{
    void* res;
#if defined(__x86_64__)
    __asm__ __volatile__ ("movq %1, %0" : "=r" (res) : "m" (*x) : "memory");
#else
    __asm__ __volatile__ ("movl %1, %0" : "=r" (res) : "m" (*x) : "memory");
#endif
    return res;
}

inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
{
    __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
}
inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
{
#if defined(__x86_64__)
    __asm__ __volatile__ ("movq %1, %0" : "=m" (*x) : "r" (value) : "memory");
#else
    __asm__ __volatile__ ("movl %1, %0" : "=m" (*x) : "r" (value) : "memory");
#endif
}

} // namespace detail
} // namespace boost

#else
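
// Generic fallback: the Windows interlocked operations are full memory
// barriers, which is stronger than the acquire/release semantics required
// here. A read is emulated as a compare-exchange of 0 with 0 (which leaves
// the value unchanged and returns it), and a write as an atomic exchange
// (whose previous value is discarded).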

namespace boost
{
namespace detail
{

inline long interlocked_read_acquire(long volatile* x) BOOST_NOEXCEPT
{
    return BOOST_INTERLOCKED_COMPARE_EXCHANGE((long*)x,0,0);
}
inline void* interlocked_read_acquire(void* volatile* x) BOOST_NOEXCEPT
{
    return BOOST_INTERLOCKED_COMPARE_EXCHANGE_POINTER((void**)x,0,0);
}

inline void interlocked_write_release(long volatile* x,long value) BOOST_NOEXCEPT
{
    BOOST_INTERLOCKED_EXCHANGE((long*)x,value);
}
inline void interlocked_write_release(void* volatile* x,void* value) BOOST_NOEXCEPT
{
    BOOST_INTERLOCKED_EXCHANGE_POINTER((void**)x,value);
}

} // namespace detail
} // namespace boost

#endif
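
// Usage sketch (illustrative only; `data`, `ready`, `compute` and `use` are
// hypothetical names, not part of this header). A release write publishes a
// value so that every write made before it is visible to any thread that
// observes that value through an acquire read:
//
//   long volatile ready = 0; // shared flag
//   int data = 0;            // shared payload
//
//   // Producer thread:
//   data = compute();                                    // ordinary write
//   boost::detail::interlocked_write_release(&ready, 1); // publish
//
//   // Consumer thread:
//   if(boost::detail::interlocked_read_acquire(&ready) == 1)
//       use(data); // guaranteed to observe the producer's write to data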

#include <boost/config/abi_suffix.hpp>

#endif // BOOST_THREAD_DETAIL_INTERLOCKED_READ_WIN32_HPP