// NOTE(review): removed HTML-extraction residue (file-size banner and line-number gutter) that preceded the source.
  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2017 - 2018 Andrey Semashev
  7. */
  8. /*!
  9. * \file atomic/detail/extra_ops_gcc_arm.hpp
  10. *
  11. * This header contains implementation of the extra atomic operations for ARM.
  12. */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <boost/cstdint.hpp>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/platform.hpp>
  20. #include <boost/atomic/detail/storage_type.hpp>
  21. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  22. #include <boost/atomic/detail/extra_ops_generic.hpp>
  23. #include <boost/atomic/detail/ops_gcc_arm_common.hpp>
  24. #include <boost/atomic/capabilities.hpp>
  25. #ifdef BOOST_HAS_PRAGMA_ONCE
  26. #pragma once
  27. #endif
  28. namespace boost {
  29. namespace atomics {
  30. namespace detail {
  31. template< typename Base >
  32. struct gcc_arm_extra_operations_common :
  33. public Base
  34. {
  35. typedef Base base_type;
  36. typedef typename base_type::storage_type storage_type;
  37. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  38. {
  39. base_type::fetch_negate(storage, order);
  40. }
  41. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  42. {
  43. base_type::fetch_complement(storage, order);
  44. }
  45. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  46. {
  47. return !!base_type::negate(storage, order);
  48. }
  49. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  50. {
  51. return !!base_type::add(storage, v, order);
  52. }
  53. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  54. {
  55. return !!base_type::sub(storage, v, order);
  56. }
  57. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  58. {
  59. return !!base_type::bitwise_and(storage, v, order);
  60. }
  61. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  62. {
  63. return !!base_type::bitwise_or(storage, v, order);
  64. }
  65. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  66. {
  67. return !!base_type::bitwise_xor(storage, v, order);
  68. }
  69. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  70. {
  71. return !!base_type::bitwise_complement(storage, order);
  72. }
  73. };
//! Extra operations implementation; specialized below for each storage size supported by the target
template< typename Base, std::size_t Size, bool Signed >
struct gcc_arm_extra_operations;
  76. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
/*!
 * Extra operations for 8-bit storage, implemented with LDREXB/STREXB
 * exclusive load/store retry loops. LDREXB loads the byte zero-extended
 * into a 32-bit register (extended_storage_type); arithmetic is done at
 * 32 bits and the final cast back to storage_type truncates to 8 bits,
 * which yields the correct wrapped result.
 */
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 1u, Signed > :
    public generic_extra_operations< Base, 1u, Signed >
{
    typedef generic_extra_operations< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // 32-bit register-width type that the byte is zero-extended into
    typedef typename make_storage_type< 4u >::type extended_storage_type;

    //! Atomically replaces the value with its two's complement negation; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its two's complement negation; returns the new value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \a v; returns the new value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n" // result = original + value
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \a v; returns the new value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n" // result = original - value
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs in \a v; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n" // result = original & value
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs in \a v; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n" // result = original | value
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs in \a v; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n" // result = original ^ value
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically replaces the value with its bitwise complement; returns the new value
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrexb %[original], %[storage]\n" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
//! Dispatches 8-bit extra_operations to the LDREXB/STREXB implementation, wrapped with the common forwarders
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 1u, Signed > >
{
};
  315. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
  316. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
  317. template< typename Base, bool Signed >
  318. struct gcc_arm_extra_operations< Base, 2u, Signed > :
  319. public generic_extra_operations< Base, 2u, Signed >
  320. {
  321. typedef generic_extra_operations< Base, 2u, Signed > base_type;
  322. typedef typename base_type::storage_type storage_type;
  323. typedef typename make_storage_type< 4u >::type extended_storage_type;
  324. static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  325. {
  326. gcc_arm_operations_base::fence_before(order);
  327. uint32_t tmp;
  328. extended_storage_type original, result;
  329. __asm__ __volatile__
  330. (
  331. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  332. "1:\n"
  333. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  334. "rsb %[result], %[original], #0\n" // result = 0 - original
  335. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  336. "teq %[tmp], #0\n" // flags = tmp==0
  337. "bne 1b\n" // if (!flags.equal) goto retry
  338. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  339. : [original] "=&r" (original), // %0
  340. [result] "=&r" (result), // %1
  341. [tmp] "=&l" (tmp), // %2
  342. [storage] "+Q" (storage) // %3
  343. :
  344. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  345. );
  346. gcc_arm_operations_base::fence_after(order);
  347. return static_cast< storage_type >(original);
  348. }
  349. static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  350. {
  351. gcc_arm_operations_base::fence_before(order);
  352. uint32_t tmp;
  353. extended_storage_type original, result;
  354. __asm__ __volatile__
  355. (
  356. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  357. "1:\n"
  358. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  359. "rsb %[result], %[original], #0\n" // result = 0 - original
  360. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  361. "teq %[tmp], #0\n" // flags = tmp==0
  362. "bne 1b\n" // if (!flags.equal) goto retry
  363. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  364. : [original] "=&r" (original), // %0
  365. [result] "=&r" (result), // %1
  366. [tmp] "=&l" (tmp), // %2
  367. [storage] "+Q" (storage) // %3
  368. :
  369. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  370. );
  371. gcc_arm_operations_base::fence_after(order);
  372. return static_cast< storage_type >(result);
  373. }
  374. static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  375. {
  376. gcc_arm_operations_base::fence_before(order);
  377. uint32_t tmp;
  378. extended_storage_type original, result;
  379. __asm__ __volatile__
  380. (
  381. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  382. "1:\n"
  383. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  384. "add %[result], %[original], %[value]\n" // result = original + value
  385. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  386. "teq %[tmp], #0\n" // flags = tmp==0
  387. "bne 1b\n" // if (!flags.equal) goto retry
  388. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  389. : [original] "=&r" (original), // %0
  390. [result] "=&r" (result), // %1
  391. [tmp] "=&l" (tmp), // %2
  392. [storage] "+Q" (storage) // %3
  393. : [value] "Ir" (v) // %4
  394. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  395. );
  396. gcc_arm_operations_base::fence_after(order);
  397. return static_cast< storage_type >(result);
  398. }
  399. static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  400. {
  401. gcc_arm_operations_base::fence_before(order);
  402. uint32_t tmp;
  403. extended_storage_type original, result;
  404. __asm__ __volatile__
  405. (
  406. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  407. "1:\n"
  408. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  409. "sub %[result], %[original], %[value]\n" // result = original - value
  410. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  411. "teq %[tmp], #0\n" // flags = tmp==0
  412. "bne 1b\n" // if (!flags.equal) goto retry
  413. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  414. : [original] "=&r" (original), // %0
  415. [result] "=&r" (result), // %1
  416. [tmp] "=&l" (tmp), // %2
  417. [storage] "+Q" (storage) // %3
  418. : [value] "Ir" (v) // %4
  419. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  420. );
  421. gcc_arm_operations_base::fence_after(order);
  422. return static_cast< storage_type >(result);
  423. }
  424. static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  425. {
  426. gcc_arm_operations_base::fence_before(order);
  427. uint32_t tmp;
  428. extended_storage_type original, result;
  429. __asm__ __volatile__
  430. (
  431. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  432. "1:\n"
  433. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  434. "and %[result], %[original], %[value]\n" // result = original & value
  435. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  436. "teq %[tmp], #0\n" // flags = tmp==0
  437. "bne 1b\n" // if (!flags.equal) goto retry
  438. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  439. : [original] "=&r" (original), // %0
  440. [result] "=&r" (result), // %1
  441. [tmp] "=&l" (tmp), // %2
  442. [storage] "+Q" (storage) // %3
  443. : [value] "Ir" (v) // %4
  444. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  445. );
  446. gcc_arm_operations_base::fence_after(order);
  447. return static_cast< storage_type >(result);
  448. }
  449. static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  450. {
  451. gcc_arm_operations_base::fence_before(order);
  452. uint32_t tmp;
  453. extended_storage_type original, result;
  454. __asm__ __volatile__
  455. (
  456. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  457. "1:\n"
  458. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  459. "orr %[result], %[original], %[value]\n" // result = original | value
  460. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  461. "teq %[tmp], #0\n" // flags = tmp==0
  462. "bne 1b\n" // if (!flags.equal) goto retry
  463. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  464. : [original] "=&r" (original), // %0
  465. [result] "=&r" (result), // %1
  466. [tmp] "=&l" (tmp), // %2
  467. [storage] "+Q" (storage) // %3
  468. : [value] "Ir" (v) // %4
  469. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  470. );
  471. gcc_arm_operations_base::fence_after(order);
  472. return static_cast< storage_type >(result);
  473. }
  474. static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  475. {
  476. gcc_arm_operations_base::fence_before(order);
  477. uint32_t tmp;
  478. extended_storage_type original, result;
  479. __asm__ __volatile__
  480. (
  481. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  482. "1:\n"
  483. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  484. "eor %[result], %[original], %[value]\n" // result = original ^ value
  485. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  486. "teq %[tmp], #0\n" // flags = tmp==0
  487. "bne 1b\n" // if (!flags.equal) goto retry
  488. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  489. : [original] "=&r" (original), // %0
  490. [result] "=&r" (result), // %1
  491. [tmp] "=&l" (tmp), // %2
  492. [storage] "+Q" (storage) // %3
  493. : [value] "Ir" (v) // %4
  494. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  495. );
  496. gcc_arm_operations_base::fence_after(order);
  497. return static_cast< storage_type >(result);
  498. }
  499. static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  500. {
  501. gcc_arm_operations_base::fence_before(order);
  502. uint32_t tmp;
  503. extended_storage_type original, result;
  504. __asm__ __volatile__
  505. (
  506. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  507. "1:\n"
  508. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  509. "mvn %[result], %[original]\n" // result = NOT original
  510. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  511. "teq %[tmp], #0\n" // flags = tmp==0
  512. "bne 1b\n" // if (!flags.equal) goto retry
  513. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  514. : [original] "=&r" (original), // %0
  515. [result] "=&r" (result), // %1
  516. [tmp] "=&l" (tmp), // %2
  517. [storage] "+Q" (storage) // %3
  518. :
  519. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  520. );
  521. gcc_arm_operations_base::fence_after(order);
  522. return static_cast< storage_type >(original);
  523. }
  524. static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  525. {
  526. gcc_arm_operations_base::fence_before(order);
  527. uint32_t tmp;
  528. extended_storage_type original, result;
  529. __asm__ __volatile__
  530. (
  531. BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
  532. "1:\n"
  533. "ldrexh %[original], %[storage]\n" // original = zero_extend(*(&storage))
  534. "mvn %[result], %[original]\n" // result = NOT original
  535. "strexh %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
  536. "teq %[tmp], #0\n" // flags = tmp==0
  537. "bne 1b\n" // if (!flags.equal) goto retry
  538. BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
  539. : [original] "=&r" (original), // %0
  540. [result] "=&r" (result), // %1
  541. [tmp] "=&l" (tmp), // %2
  542. [storage] "+Q" (storage) // %3
  543. :
  544. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
  545. );
  546. gcc_arm_operations_base::fence_after(order);
  547. return static_cast< storage_type >(result);
  548. }
  549. };
//! Dispatches 16-bit extra_operations to the LDREXH/STREXH implementation, wrapped with the common forwarders
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 2u, Signed > >
{
};
  555. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
/*!
 * Extra operations for 32-bit storage. Uses LDREX/STREX directly on
 * storage_type; no widening/truncation is needed at the native word size.
 */
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 4u, Signed > :
    public generic_extra_operations< Base, 4u, Signed >
{
    typedef generic_extra_operations< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    //! Atomically replaces the value with its two's complement negation; returns the previous value
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "rsb %[result], %[original], #0\n" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }
    //! Atomically replaces the value with its two's complement negation; returns the new value
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "rsb %[result], %[original], #0\n" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }
    //! Atomically adds \a v; returns the new value
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "add %[result], %[original], %[value]\n" // result = original + value
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }
    //! Atomically subtracts \a v; returns the new value
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n"
            "ldrex %[original], %[storage]\n" // original = *(&storage)
            "sub %[result], %[original], %[value]\n" // result = original - value
            "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n" // flags = tmp==0
            "bne 1b\n" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4, immediate or register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }
// Atomically ANDs v into storage and returns the NEW value (original & v).
// LDREX/STREX retry loop; see add() for the loop structure and constraints.
static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
    gcc_arm_operations_base::fence_before(order);
    uint32_t tmp;
    storage_type original, result;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
        "1:\n"
        "ldrex %[original], %[storage]\n"        // original = *(&storage)
        "and %[result], %[original], %[value]\n" // result = original & value
        "strex %[tmp], %[result], %[storage]\n"  // *(&storage) = result, tmp = store failed
        "teq %[tmp], #0\n"                       // flags = tmp==0
        "bne 1b\n"                               // if (!flags.equal) goto retry
        BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
        : [original] "=&r" (original),  // %0
          [result] "=&r" (result),      // %1
          [tmp] "=&l" (tmp),            // %2
          [storage] "+Q" (storage)      // %3
        : [value] "Ir" (v)              // %4
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
    );
    gcc_arm_operations_base::fence_after(order);
    return result;
}
// Atomically ORs v into storage and returns the NEW value (original | v).
// LDREX/STREX retry loop; see add() for the loop structure and constraints.
static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
    gcc_arm_operations_base::fence_before(order);
    uint32_t tmp;
    storage_type original, result;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
        "1:\n"
        "ldrex %[original], %[storage]\n"        // original = *(&storage)
        "orr %[result], %[original], %[value]\n" // result = original | value
        "strex %[tmp], %[result], %[storage]\n"  // *(&storage) = result, tmp = store failed
        "teq %[tmp], #0\n"                       // flags = tmp==0
        "bne 1b\n"                               // if (!flags.equal) goto retry
        BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
        : [original] "=&r" (original),  // %0
          [result] "=&r" (result),      // %1
          [tmp] "=&l" (tmp),            // %2
          [storage] "+Q" (storage)      // %3
        : [value] "Ir" (v)              // %4
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
    );
    gcc_arm_operations_base::fence_after(order);
    return result;
}
// Atomically XORs v into storage and returns the NEW value (original ^ v).
// LDREX/STREX retry loop; see add() for the loop structure and constraints.
static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
    gcc_arm_operations_base::fence_before(order);
    uint32_t tmp;
    storage_type original, result;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
        "1:\n"
        "ldrex %[original], %[storage]\n"        // original = *(&storage)
        "eor %[result], %[original], %[value]\n" // result = original ^ value
        "strex %[tmp], %[result], %[storage]\n"  // *(&storage) = result, tmp = store failed
        "teq %[tmp], #0\n"                       // flags = tmp==0
        "bne 1b\n"                               // if (!flags.equal) goto retry
        BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
        : [original] "=&r" (original),  // %0
          [result] "=&r" (result),      // %1
          [tmp] "=&l" (tmp),            // %2
          [storage] "+Q" (storage)      // %3
        : [value] "Ir" (v)              // %4
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
    );
    gcc_arm_operations_base::fence_after(order);
    return result;
}
// Atomically replaces storage with its bitwise complement and returns the
// PREVIOUS value (fetch-style semantics — contrast with bitwise_complement,
// which returns the new value). LDREX/STREX retry loop.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    gcc_arm_operations_base::fence_before(order);
    uint32_t tmp;
    storage_type original, result;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
        "1:\n"
        "ldrex %[original], %[storage]\n"       // original = *(&storage)
        "mvn %[result], %[original]\n"          // result = NOT original
        "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
        "teq %[tmp], #0\n"                      // flags = tmp==0
        "bne 1b\n"                              // if (!flags.equal) goto retry
        BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
        : [original] "=&r" (original),  // %0
          [result] "=&r" (result),      // %1
          [tmp] "=&l" (tmp),            // %2
          [storage] "+Q" (storage)      // %3
        :
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
    );
    gcc_arm_operations_base::fence_after(order);
    return original; // fetch_ variant: return the pre-complement value
}
// Atomically replaces storage with its bitwise complement and returns the
// NEW value (~original). Same asm body as fetch_complement, differing only
// in which register's value is returned.
static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
    gcc_arm_operations_base::fence_before(order);
    uint32_t tmp;
    storage_type original, result;
    __asm__ __volatile__
    (
        BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
        "1:\n"
        "ldrex %[original], %[storage]\n"       // original = *(&storage)
        "mvn %[result], %[original]\n"          // result = NOT original
        "strex %[tmp], %[result], %[storage]\n" // *(&storage) = result, tmp = store failed
        "teq %[tmp], #0\n"                      // flags = tmp==0
        "bne 1b\n"                              // if (!flags.equal) goto retry
        BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
        : [original] "=&r" (original),  // %0
          [result] "=&r" (result),      // %1
          [tmp] "=&l" (tmp),            // %2
          [storage] "+Q" (storage)      // %3
        :
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
    );
    gcc_arm_operations_base::fence_after(order);
    return result; // non-fetch variant: return the complemented value
}
  787. };
// Selects the ARM LDREX/STREX-based extra operations (negate, complement,
// and/or/xor, etc.) for 4-byte storage, wrapped by the common adapter that
// derives the remaining operation variants.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 4u, Signed > >
{
};
  793. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
// Extra atomic operations for 8-byte storage, implemented with the
// doubleword-exclusive LDREXD/STREXD instructions (only compiled when
// BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD is defined).
//
// Conventions shared by every method below:
//  - 64-bit operands live in even/odd register pairs; "%N" names the low
//    word and "%HN" the high word of operand N.
//  - The loop retries until STREXD reports success (tmp == 0).
//  - storage is passed by address in %3 with a "memory" clobber, since the
//    asm reads/writes it through the pointer rather than a "Q" operand.
//  - fence_before/fence_after implement the requested memory_order.
//  - fetch_* methods return the PREVIOUS value; the others return the NEW one.
template< typename Base, bool Signed >
struct gcc_arm_extra_operations< Base, 8u, Signed > :
    public generic_extra_operations< Base, 8u, Signed >
{
    typedef generic_extra_operations< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    // Atomically negates storage (two's complement) and returns the PREVIOUS value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "mvn %2, %1\n"                // result = NOT original
            "mvn %H2, %H1\n"
            "adds %2, %2, #1\n"           // result = result + 1 (two's complement: -x == ~x + 1)
            "adc %H2, %H2, #0\n"          // propagate the carry into the high word
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage)              // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return original;
    }

    // Atomically negates storage and returns the NEW value (-original).
    // Same asm as fetch_negate; only the returned register differs.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "mvn %2, %1\n"                // result = NOT original
            "mvn %H2, %H1\n"
            "adds %2, %2, #1\n"           // result = result + 1
            "adc %H2, %H2, #0\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage)              // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically adds v to storage and returns the NEW value (original + v).
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "adds %2, %1, %4\n"           // result = original + value (low word, sets carry)
            "adc %H2, %H1, %H4\n"         // high word with carry
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage),             // %3
              "r" (v)                     // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically subtracts v from storage and returns the NEW value (original - v).
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "subs %2, %1, %4\n"           // result = original - value (low word, sets borrow)
            "sbc %H2, %H1, %H4\n"         // high word with borrow
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage),             // %3
              "r" (v)                     // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically ANDs v into storage and returns the NEW value (original & v).
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "and %2, %1, %4\n"            // result = original & value (word-wise)
            "and %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage),             // %3
              "r" (v)                     // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically ORs v into storage and returns the NEW value (original | v).
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "orr %2, %1, %4\n"            // result = original | value (word-wise)
            "orr %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage),             // %3
              "r" (v)                     // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically XORs v into storage and returns the NEW value (original ^ v).
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "eor %2, %1, %4\n"            // result = original ^ value (word-wise)
            "eor %H2, %H1, %H4\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage),             // %3
              "r" (v)                     // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result;
    }

    // Atomically replaces storage with its bitwise complement and returns the
    // PREVIOUS value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "mvn %2, %1\n"                // result = NOT original (word-wise)
            "mvn %H2, %H1\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage)              // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return original; // fetch_ variant: return the pre-complement value
    }

    // Atomically replaces storage with its bitwise complement and returns the
    // NEW value (~original). Same asm as fetch_complement.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        gcc_arm_operations_base::fence_before(order);
        storage_type original, result;
        uint32_t tmp;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n"
            "ldrexd %1, %H1, [%3]\n"      // original = *(&storage)
            "mvn %2, %1\n"                // result = NOT original (word-wise)
            "mvn %H2, %H1\n"
            "strexd %0, %2, %H2, [%3]\n"  // *(&storage) = result, tmp = store failed
            "teq %0, #0\n"                // flags = tmp==0
            "bne 1b\n"                    // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original),           // %1
              "=&r" (result)              // %2
            : "r" (&storage)              // %3
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        gcc_arm_operations_base::fence_after(order);
        return result; // non-fetch variant: return the complemented value
    }
};
// Selects the ARM LDREXD/STREXD-based extra operations for 8-byte storage,
// wrapped by the common adapter that derives the remaining operation variants.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public gcc_arm_extra_operations_common< gcc_arm_extra_operations< Base, 8u, Signed > >
{
};
  1040. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
  1041. } // namespace detail
  1042. } // namespace atomics
  1043. } // namespace boost
  1044. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_