  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2009 Helge Bahmann
  7. * Copyright (c) 2013 Tim Blechmann
  8. * Copyright (c) 2014 Andrey Semashev
  9. */
  10. /*!
  11. * \file atomic/detail/ops_gcc_ppc.hpp
  12. *
  13. * This header contains implementation of the \c operations template.
  14. */
  15. #ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_
  16. #define BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_
  17. #include <cstddef>
  18. #include <boost/memory_order.hpp>
  19. #include <boost/atomic/detail/config.hpp>
  20. #include <boost/atomic/detail/storage_type.hpp>
  21. #include <boost/atomic/detail/operations_fwd.hpp>
  22. #include <boost/atomic/detail/ops_gcc_ppc_common.hpp>
  23. #include <boost/atomic/capabilities.hpp>
  24. #ifdef BOOST_HAS_PRAGMA_ONCE
  25. #pragma once
  26. #endif
  27. namespace boost {
  28. namespace atomics {
  29. namespace detail {
  30. // The implementation below uses information from this document:
  31. // http://www.rdrop.com/users/paulmck/scalability/paper/N2745r.2010.02.19a.html
  32. /*
  33. Refer to: Motorola: "Programming Environments Manual for 32-Bit
  34. Implementations of the PowerPC Architecture", Appendix E:
  35. "Synchronization Programming Examples" for an explanation of what is
  36. going on here (can be found on the web at various places by the
  37. name "MPCFPE32B.pdf", Google is your friend...)
  38. Most of the atomic operations map to instructions in a relatively
  39. straight-forward fashion, but "load"s may at first glance appear
  40. a bit strange as they map to:
  41. lwz %rX, addr
  42. cmpw %rX, %rX
  43. bne- 1f
  44. 1:
  45. That is, the CPU is forced to perform a branch that "formally" depends
  46. on the value retrieved from memory. This scheme has an overhead of
  47. about 1-2 clock cycles per load, but it allows to map "acquire" to
  48. the "isync" instruction instead of "sync" uniformly and for all type
  49. of atomic operations. Since "isync" has a cost of about 15 clock
  50. cycles, while "sync" hast a cost of about 50 clock cycles, the small
  51. penalty to atomic loads more than compensates for this.
  52. Byte- and halfword-sized atomic values are implemented in two ways.
  53. When 8 and 16-bit instructions are available (in Power8 and later),
  54. they are used. Otherwise operations are realized by encoding the
  55. value to be represented into a word, performing sign/zero extension
  56. as appropriate. This means that after add/sub operations the value
  57. needs fixing up to accurately preserve the wrap-around semantic of
  58. the smaller type. (Nothing special needs to be done for the bit-wise
  59. and the "exchange type" operators as the compiler already sees to
  60. it that values carried in registers are extended appropriately and
  61. everything falls into place naturally).
  62. The register constraint "b" instructs gcc to use any register
  63. except r0; this is sometimes required because the encoding for
  64. r0 is used to signify "constant zero" in a number of instructions,
  65. making r0 unusable in this place. For simplicity this constraint
  66. is used everywhere since I am to lazy to look this up on a
  67. per-instruction basis, and ppc has enough registers for this not
  68. to pose a problem.
  69. */
//! 32-bit atomic operations implemented with lwarx/stwcx. load-reserve /
//! store-conditional retry loops. fence_before()/fence_after() (inherited from
//! gcc_ppc_operations_base) emit whatever barriers the given memory_order
//! requires around each asm block. The "%yN" operand modifier together with
//! the "Z" constraint produces the indexed-form memory operand that
//! lwarx/stwcx. require; "b" avoids r0, which encodes "constant zero" in
//! indexed addressing (see the file-top comment).
template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_ppc_operations_base
{
    typedef typename make_storage_type< 4u >::type storage_type;
    typedef typename make_storage_type< 4u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain word store; any required ordering comes from the fence emitted before it.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        __asm__ __volatile__
        (
            "stw %1, %0\n\t"
            : "+m" (storage)
            : "r" (v)
        );
    }

    // Word load. For acquire/consume (and seq_cst, which includes acquire) the
    // load is followed by an always-false compare-and-branch on the loaded
    // value plus isync; the artificial dependency lets the cheaper isync act
    // as the acquire fence (rationale in the file-top comment).
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("sync" ::: "memory"); // full barrier before the load for seq_cst
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
            __asm__ __volatile__
            (
                "lwz %0, %1\n\t"
                "cmpw %0, %0\n\t"  // compares equal by construction; only creates a data dependency
                "bne- 1f\n\t"      // never taken
                "1:\n\t"
                "isync\n\t"        // acquire fence ordered after the dependent branch
                : "=&r" (v)
                : "m" (storage)
                : "cr0", "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lwz %0, %1\n\t"
                : "=&r" (v)
                : "m" (storage)
            );
        }
        return v;
    }

    // Atomic swap; returns the value observed before the store.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y1\n\t"   // load word and reserve
            "stwcx. %2,%y1\n\t"  // store conditionally; sets cr0
            "bne- 1b\n\t"        // reservation lost - retry
            : "=&b" (original), "+Z" (storage)
            : "b" (v)
            : "cr0"
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: a single lwarx/stwcx. attempt, so it may fail spuriously when
    // the reservation is lost even if the values match. On failure the value
    // read from storage is left in expected (operand %0; the original expected
    // is read through the separate input operand %3).
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"       // success = false
            "lwarx %0,%y2\n\t"
            "cmpw %0, %3\n\t"    // compare loaded value with expected
            "bne- 1f\n\t"        // mismatch - fail
            "stwcx. %4,%y2\n\t"  // try to store desired
            "bne- 1f\n\t"        // reservation lost - fail (no retry: weak)
            "li %1, 1\n\t"       // success = true
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Strong CAS: like the weak form but a lost reservation branches back to
    // the lwarx (label 0), so the operation only fails when the values differ.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"
            "0: lwarx %0,%y2\n\t"
            "cmpw %0, %3\n\t"
            "bne- 1f\n\t"        // values differ - genuine failure
            "stwcx. %4,%y2\n\t"
            "bne- 0b\n\t"        // reservation lost - retry from the load
            "li %1, 1\n\t"
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Atomic addition; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic subtraction; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise AND; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "and %1,%0,%3\n\t"
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise OR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "or %1,%0,%3\n\t"
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise XOR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "xor %1,%0,%3\n\t"
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Sets the value to 1; returns true if it was already non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the given ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
  284. #if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
//! 8-bit atomic operations using the native byte-sized LL/SC instructions
//! lbarx/stbcx. (available when BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX is
//! defined, i.e. Power8 and later per the file-top comment). Structure mirrors
//! the 32-bit specialization, with byte load/store instructions.
template< bool Signed >
struct operations< 1u, Signed > :
    public gcc_ppc_operations_base
{
    typedef typename make_storage_type< 1u >::type storage_type;
    typedef typename make_storage_type< 1u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain byte store; ordering comes from the fence emitted before it.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        __asm__ __volatile__
        (
            "stb %1, %0\n\t"
            : "+m" (storage)
            : "r" (v)
        );
    }

    // Byte load; for acquiring orders, the dependent never-taken branch plus
    // isync forms the acquire fence (see the file-top comment).
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("sync" ::: "memory"); // full barrier before the load for seq_cst
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
            __asm__ __volatile__
            (
                "lbz %0, %1\n\t"
                "cmpw %0, %0\n\t"  // always-equal compare: only creates a data dependency
                "bne- 1f\n\t"      // never taken
                "1:\n\t"
                "isync\n\t"
                : "=&r" (v)
                : "m" (storage)
                : "cr0", "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lbz %0, %1\n\t"
                : "=&r" (v)
                : "m" (storage)
            );
        }
        return v;
    }

    // Atomic swap; returns the value observed before the store.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y1\n\t"   // load byte and reserve
            "stbcx. %2,%y1\n\t"  // store conditionally; sets cr0
            "bne- 1b\n\t"        // reservation lost - retry
            : "=&b" (original), "+Z" (storage)
            : "b" (v)
            : "cr0"
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: single attempt, may fail spuriously; on failure the loaded
    // value is left in expected.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"       // success = false
            "lbarx %0,%y2\n\t"
            "cmpw %0, %3\n\t"    // compare loaded value with expected
            "bne- 1f\n\t"        // mismatch - fail
            "stbcx. %4,%y2\n\t"
            "bne- 1f\n\t"        // reservation lost - fail (no retry: weak)
            "li %1, 1\n\t"       // success = true
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Strong CAS: a lost reservation retries from the load (label 0), so
    // failure only reports a genuine value mismatch.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"
            "0: lbarx %0,%y2\n\t"
            "cmpw %0, %3\n\t"
            "bne- 1f\n\t"        // values differ - genuine failure
            "stbcx. %4,%y2\n\t"
            "bne- 0b\n\t"        // reservation lost - retry
            "li %1, 1\n\t"
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Atomic addition; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "stbcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic subtraction; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "stbcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise AND; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y2\n\t"
            "and %1,%0,%3\n\t"
            "stbcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise OR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y2\n\t"
            "or %1,%0,%3\n\t"
            "stbcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise XOR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lbarx %0,%y2\n\t"
            "xor %1,%0,%3\n\t"
            "stbcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Sets the value to 1; returns true if it was already non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the given ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
  499. #else // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
//! 8-bit unsigned atomics emulated on top of the 32-bit implementation when
//! lbarx/stbcx. are unavailable. Only fetch_add/fetch_sub are overridden:
//! after the 32-bit arithmetic, rlwinm masks the result to the low 8 bits so
//! the stored word keeps the wrap-around semantics of the unsigned byte type
//! (see the file-top comment; the bit-wise and exchange operations need no
//! fixup).
template< >
struct operations< 1u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    // Atomic 8-bit unsigned addition over 32-bit storage; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "rlwinm %1, %1, 0, 0xff\n\t"  // truncate to 8 bits (zero-extend)
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"                 // reservation lost - retry
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 8-bit unsigned subtraction over 32-bit storage; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "rlwinm %1, %1, 0, 0xff\n\t"  // truncate to 8 bits (zero-extend)
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
//! 8-bit signed atomics emulated on top of the 32-bit implementation when
//! lbarx/stbcx. are unavailable. Only fetch_add/fetch_sub are overridden:
//! extsb sign-extends the 8-bit result back to a full word so the stored
//! value stays a valid signed-byte representation after wrap-around.
template< >
struct operations< 1u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    // Atomic 8-bit signed addition over 32-bit storage; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "extsb %1, %1\n\t"  // sign-extend the 8-bit result to a word
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"       // reservation lost - retry
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 8-bit signed subtraction over 32-bit storage; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "extsb %1, %1\n\t"  // sign-extend the 8-bit result to a word
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
  590. #endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
  591. #if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
//! 16-bit atomic operations using the native halfword LL/SC instructions
//! lharx/sthcx. (available when BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX is
//! defined). Structure mirrors the 32-bit specialization, with halfword
//! load/store instructions.
template< bool Signed >
struct operations< 2u, Signed > :
    public gcc_ppc_operations_base
{
    typedef typename make_storage_type< 2u >::type storage_type;
    typedef typename make_storage_type< 2u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain halfword store; ordering comes from the fence emitted before it.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        __asm__ __volatile__
        (
            "sth %1, %0\n\t"
            : "+m" (storage)
            : "r" (v)
        );
    }

    // Halfword load; for acquiring orders, the dependent never-taken branch
    // plus isync forms the acquire fence (see the file-top comment).
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("sync" ::: "memory"); // full barrier before the load for seq_cst
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
            __asm__ __volatile__
            (
                "lhz %0, %1\n\t"
                "cmpw %0, %0\n\t"  // always-equal compare: only creates a data dependency
                "bne- 1f\n\t"      // never taken
                "1:\n\t"
                "isync\n\t"
                : "=&r" (v)
                : "m" (storage)
                : "cr0", "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lhz %0, %1\n\t"
                : "=&r" (v)
                : "m" (storage)
            );
        }
        return v;
    }

    // Atomic swap; returns the value observed before the store.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y1\n\t"   // load halfword and reserve
            "sthcx. %2,%y1\n\t"  // store conditionally; sets cr0
            "bne- 1b\n\t"        // reservation lost - retry
            : "=&b" (original), "+Z" (storage)
            : "b" (v)
            : "cr0"
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: single attempt, may fail spuriously; on failure the loaded
    // value is left in expected.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"       // success = false
            "lharx %0,%y2\n\t"
            "cmpw %0, %3\n\t"    // compare loaded value with expected
            "bne- 1f\n\t"        // mismatch - fail
            "sthcx. %4,%y2\n\t"
            "bne- 1f\n\t"        // reservation lost - fail (no retry: weak)
            "li %1, 1\n\t"       // success = true
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Strong CAS: a lost reservation retries from the load (label 0), so
    // failure only reports a genuine value mismatch.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"
            "0: lharx %0,%y2\n\t"
            "cmpw %0, %3\n\t"
            "bne- 1f\n\t"        // values differ - genuine failure
            "sthcx. %4,%y2\n\t"
            "bne- 0b\n\t"        // reservation lost - retry
            "li %1, 1\n\t"
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Atomic addition; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "sthcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic subtraction; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "sthcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise AND; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y2\n\t"
            "and %1,%0,%3\n\t"
            "sthcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise OR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y2\n\t"
            "or %1,%0,%3\n\t"
            "sthcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic bitwise XOR; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lharx %0,%y2\n\t"
            "xor %1,%0,%3\n\t"
            "sthcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Sets the value to 1; returns true if it was already non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the given ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
  806. #else // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
//! 16-bit unsigned atomics emulated on top of the 32-bit implementation when
//! lharx/sthcx. are unavailable. Only fetch_add/fetch_sub are overridden:
//! rlwinm masks the arithmetic result to the low 16 bits so the stored word
//! keeps the wrap-around semantics of the unsigned halfword type.
template< >
struct operations< 2u, false > :
    public operations< 4u, false >
{
    typedef operations< 4u, false > base_type;
    typedef base_type::storage_type storage_type;

    // Atomic 16-bit unsigned addition over 32-bit storage; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "rlwinm %1, %1, 0, 0xffff\n\t"  // truncate to 16 bits (zero-extend)
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"                   // reservation lost - retry
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 16-bit unsigned subtraction over 32-bit storage; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "rlwinm %1, %1, 0, 0xffff\n\t"  // truncate to 16 bits (zero-extend)
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
//! 16-bit signed atomics emulated on top of the 32-bit implementation when
//! lharx/sthcx. are unavailable. Only fetch_add/fetch_sub are overridden:
//! extsh sign-extends the 16-bit result back to a full word so the stored
//! value stays a valid signed-halfword representation after wrap-around.
template< >
struct operations< 2u, true > :
    public operations< 4u, true >
{
    typedef operations< 4u, true > base_type;
    typedef base_type::storage_type storage_type;

    // Atomic 16-bit signed addition over 32-bit storage; returns the pre-add value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "extsh %1, %1\n\t"  // sign-extend the 16-bit result to a word
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"       // reservation lost - retry
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 16-bit signed subtraction over 32-bit storage; returns the pre-sub value.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "lwarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "extsh %1, %1\n\t"  // sign-extend the 16-bit result to a word
            "stwcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }
};
  897. #endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
  898. #if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LDARX_STDCX)
// 64-bit atomic operations implemented with the ldarx/stdcx. (load-reserve /
// store-conditional doubleword) instruction pair. Only compiled when the
// target supports these instructions (BOOST_ATOMIC_DETAIL_PPC_HAS_LDARX_STDCX,
// see the surrounding #if). Ordering is provided by fence_before/fence_after
// from gcc_ppc_operations_base plus the explicit sync/isync sequences below.
template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_ppc_operations_base
{
    typedef typename make_storage_type< 8u >::type storage_type;
    typedef typename make_storage_type< 8u >::aligned aligned_storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    // Plain 64-bit store; the fence emitted by fence_before(order) supplies
    // whatever ordering the requested memory_order needs before the store.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        fence_before(order);
        __asm__ __volatile__
        (
            "std %1, %0\n\t"
            : "+m" (storage)
            : "r" (v)
        );
    }

    // Plain 64-bit load. For seq_cst a full "sync" is emitted first; for
    // acquire/consume the load is followed by the classic PowerPC
    // cmp/bne-to-next-insn/isync sequence: the compare of the loaded register
    // against itself makes the (never-taken) branch depend on the loaded
    // value, and isync then prevents subsequent instructions from executing
    // before the load completes — i.e. acquire semantics.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("sync" ::: "memory");
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
            __asm__ __volatile__
            (
                "ld %0, %1\n\t"
                "cmpd %0, %0\n\t"   // always "equal": creates a dependency on the loaded value
                "bne- 1f\n\t"       // never taken; branch target is the next instruction
                "1:\n\t"
                "isync\n\t"         // discard any speculatively executed following instructions
                : "=&b" (v)
                : "m" (storage)
                : "cr0", "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "ld %0, %1\n\t"
                : "=&b" (v)
                : "m" (storage)
            );
        }
        return v;
    }

    // Atomically replaces the stored value with v; returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y1\n\t"      // load current value and take a reservation
            "stdcx. %2,%y1\n\t"     // conditional store of the new value
            "bne- 1b\n\t"           // retry if the reservation was lost
            : "=&b" (original), "+Z" (storage)
            : "b" (v)
            : "cr0"
        );
        fence_after(order);
        return original;
    }

    // Weak CAS: a single ldarx/stdcx. attempt, no retry loop. A spurious
    // stdcx. failure (lost reservation) is reported as failure, which is what
    // "weak" permits. On return, 'expected' holds the value actually loaded
    // from storage.
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"          // presume failure
            "ldarx %0,%y2\n\t"      // load current value and take a reservation
            "cmpd %0, %3\n\t"       // compare against the expected value
            "bne- 1f\n\t"           // mismatch: fail
            "stdcx. %4,%y2\n\t"     // try to store the desired value
            "bne- 1f\n\t"           // reservation lost: fail (allowed for weak CAS)
            "li %1, 1\n\t"          // success
            "1:"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Strong CAS: like the weak form, but a spurious stdcx. failure loops
    // back to the ldarx (label 0) and retries; only a genuine value mismatch
    // returns false. On return, 'expected' holds the value actually loaded.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        int success;
        fence_before(success_order);
        __asm__ __volatile__
        (
            "li %1, 0\n\t"          // presume failure
            "0: ldarx %0,%y2\n\t"   // load current value and take a reservation
            "cmpd %0, %3\n\t"       // compare against the expected value
            "bne- 1f\n\t"           // mismatch: fail
            "stdcx. %4,%y2\n\t"     // try to store the desired value
            "bne- 0b\n\t"           // reservation lost: retry from the load
            "li %1, 1\n\t"          // success
            "1:\n\t"
            : "=&b" (expected), "=&b" (success), "+Z" (storage)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            fence_after(success_order);
        else
            fence_after(failure_order);
        return !!success;
    }

    // Atomic 64-bit add; returns the value observed before the addition.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y2\n\t"
            "add %1,%0,%3\n\t"
            "stdcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 64-bit subtract; returns the value observed before the subtraction.
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y2\n\t"
            "sub %1,%0,%3\n\t"
            "stdcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 64-bit bitwise AND; returns the value observed before the operation.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y2\n\t"
            "and %1,%0,%3\n\t"
            "stdcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 64-bit bitwise OR; returns the value observed before the operation.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y2\n\t"
            "or %1,%0,%3\n\t"
            "stdcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Atomic 64-bit bitwise XOR; returns the value observed before the operation.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        fence_before(order);
        __asm__ __volatile__
        (
            "1:\n\t"
            "ldarx %0,%y2\n\t"
            "xor %1,%0,%3\n\t"
            "stdcx. %1,%y2\n\t"
            "bne- 1b\n\t"
            : "=&b" (original), "=&b" (result), "+Z" (storage)
            : "b" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        fence_after(order);
        return original;
    }

    // Sets the value to 1 and returns true if it was already non-zero.
    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    // Resets the value to 0 with the requested ordering.
    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, 0, order);
    }
};
  1113. #endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LDARX_STDCX)
  1114. BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
  1115. {
  1116. if (order != memory_order_relaxed)
  1117. {
  1118. #if defined(__powerpc64__) || defined(__PPC64__)
  1119. if (order != memory_order_seq_cst)
  1120. __asm__ __volatile__ ("lwsync" ::: "memory");
  1121. else
  1122. __asm__ __volatile__ ("sync" ::: "memory");
  1123. #else
  1124. __asm__ __volatile__ ("sync" ::: "memory");
  1125. #endif
  1126. }
  1127. }
  1128. BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
  1129. {
  1130. if (order != memory_order_relaxed)
  1131. #if defined(__ibmxl__) || defined(__IBMCPP__)
  1132. __fence();
  1133. #else
  1134. __asm__ __volatile__ ("" ::: "memory");
  1135. #endif
  1136. }
  1137. } // namespace detail
  1138. } // namespace atomics
  1139. } // namespace boost
  1140. #endif // BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_