extra_ops_gcc_x86.hpp

/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2015 Andrey Semashev
 */
/*!
 * \file   atomic/detail/extra_ops_gcc_x86.hpp
 *
 * This header contains the implementation of the extra atomic operations for x86.
 */
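
/*
 * Note: these operations back the extended members of boost::atomic<>, such
 * as fetch_negate(), opaque_add(), add_and_test() and bit_test_and_set(). A
 * minimal usage sketch (illustrative only; consult the Boost.Atomic
 * documentation for the authoritative public interface):
 *
 *   boost::atomic< unsigned int > flags(0u);
 *   bool was_set = flags.bit_test_and_set(3u); // lock bts on x86
 *   flags.opaque_add(2u);                      // lock add, result discarded
 */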

#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {
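
/*
 * Implements the extra operations that map onto single lock-prefixed x86
 * instructions, layered over the core operations provided by Base. When the
 * compiler supports flag output constraints (indicated by
 * BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS), the condition flag is extracted
 * directly with "=@cc*"; otherwise a setcc instruction materializes it into a
 * byte register.
 */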
template< typename Base >
struct gcc_x86_extra_operations_common :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(Base::fetch_add(storage, v, order) + v);
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(Base::fetch_sub(storage, v, order) - v);
    }
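
    // lock bts/btr/btc set/reset/complement the selected bit atomically and
    // latch its previous value in CF, which becomes the return value.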
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; bts %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; bts %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btr %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btr %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btc %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btc %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u >::type temp_storage_type;
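
// This CAS loop emulates a unary atomic op that has no lock-prefixed fetch
// form: the current value is loaded into al (movzbl also zeroes the upper
// bits of the 32-bit temporary), "op" is applied to a copy, and cmpxchgb
// retries until the unmodified value is successfully replaced. On exit,
// "original" holds the pre-op value and "result" the post-op value.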
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzbl %[orig], %2\n\t"\
        op " %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
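
// Same CAS loop shape for binary ops: the temporary receives the argument,
// "op" combines it with the expected value in al, and cmpxchgb publishes the
// combined value.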
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%al, %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }
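
    // The opaque_* ops discard the result, so a single lock-prefixed
    // instruction suffices; a compile-time constant argument of 1 selects the
    // shorter inc/dec encoding.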
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }
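
    // The *_and_test ops return whether the operation produced a nonzero
    // result, taken from ZF (via "=@ccnz" or a setnz fallback).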
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
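
/*
 * 16-bit specialization: same structure as the 8-bit one, with w-suffixed
 * instructions and movzwl for the CAS loop load.
 */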
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u >::type temp_storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzwl %[orig], %2\n\t"\
        op " %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%ax, %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
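
/*
 * 32-bit specialization: operates directly on storage_type, as writing a full
 * 32-bit register needs no zero-extending temporary.
 */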
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%eax, %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "ir" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
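
/*
 * 64-bit specialization, available only on x86-64. The "er" constraint limits
 * immediates to 32-bit sign-extended values, which is what the q-suffixed
 * instructions accept.
 */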
#if defined(__x86_64__)

template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%rax, %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "er" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "er" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};

#endif // defined(__x86_64__)

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_