/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file atomic/detail/ops_msvc_arm.hpp
 *
 * This header contains the implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_

#include <intrin.h>
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/capabilities.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
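
// Note: the __iso_volatile_* intrinsics perform plain loads and stores with ISO C++
// volatile semantics, i.e. without the extra ordering that MSVC's /volatile:ms mode
// attaches to volatile accesses. All memory ordering is added explicitly by the
// fences defined below.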
#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows one to avoid
// an unnecessary memory barrier after a consume load, since it supports data dependency
// ordering. However, some compiler optimizations may break seemingly valid code that
// relies on data dependency tracking, by injecting bogus branches to aid out-of-order
// execution. This may happen not only in Boost.Atomic code but also in the user's code,
// over which we have no control. See this thread:
// http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
struct msvc_arm_operations_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }
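
    // The bit tests in the fences below rely on Boost's flag-style memory_order encoding
    // (see boost/memory_order.hpp): memory_order_acquire and memory_order_release occupy
    // distinct bits, and memory_order_acq_rel and memory_order_seq_cst include both, so
    // e.g. the release bit is set for release, acq_rel and seq_cst orders alike.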
    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            hardware_full_fence();
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        if (order == memory_order_seq_cst)
            hardware_full_fence();
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            hardware_full_fence();
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
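        // For example, assuming Boost's encoding (relaxed = 0, consume = 1, acquire = 2,
        // release = 4, acq_rel = 6, seq_cst = 14), cas_common_order(memory_order_release,
        // memory_order_consume) computes ((4 | 1) & ~1u) | (((4 | 1) & 1u) << 1) == 4 | 2 == 6,
        // i.e. memory_order_acq_rel: the consume bit is shifted left into the acquire bit.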
        return static_cast< memory_order >(((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & ~static_cast< unsigned int >(memory_order_consume))
            | (((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & static_cast< unsigned int >(memory_order_consume)) << 1u));
    }
};

template< std::size_t Size, bool Signed, typename Derived >
struct msvc_arm_operations :
    public msvc_arm_operations_base
{
    typedef typename make_storage_type< Size >::type storage_type;
    typedef typename make_storage_type< Size >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
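        // Subtraction is implemented as fetch_add of the negated value; negating through
        // the signed counterpart of storage_type makes the result wrap as two's complement
        // regardless of the signedness of storage_type.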
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
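        // The Interlocked compare-exchange never fails spuriously, so the weak form can
        // simply delegate to the strong one.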
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};

template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< 1u, Signed, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< 1u, Signed, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< 2u, Signed, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< 2u, Signed, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 4u, Signed > :
    public msvc_arm_operations< 4u, Signed, operations< 4u, Signed > >
{
    typedef msvc_arm_operations< 4u, Signed, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< 8u, Signed, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< 8u, Signed, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};

BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    if (order != memory_order_relaxed)
        msvc_arm_operations_base::hardware_full_fence();
    BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order != memory_order_relaxed)
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
}

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_