// Copyright 2015-2019 Hans Dembinski
// Copyright 2019 Glen Joseph Fernandes (glenjofe@gmail.com)
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP
#define BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP

#include <algorithm>
#include <boost/assert.hpp>
#include <boost/config.hpp>
#include <boost/core/alloc_construct.hpp>
#include <boost/core/exchange.hpp>
#include <boost/core/nvp.hpp>
#include <boost/histogram/detail/array_wrapper.hpp>
#include <boost/histogram/detail/iterator_adaptor.hpp>
#include <boost/histogram/detail/large_int.hpp>
#include <boost/histogram/detail/operators.hpp>
#include <boost/histogram/detail/safe_comparison.hpp>
#include <boost/histogram/fwd.hpp>
#include <boost/mp11/algorithm.hpp>
#include <boost/mp11/list.hpp>
#include <boost/mp11/utility.hpp>
#include <cmath>
#include <cstdint>
#include <functional>
#include <iterator>
#include <memory>
#include <type_traits>

namespace boost {
namespace histogram {
namespace detail {

template <class T>
struct is_large_int : std::false_type {};

template <class A>
struct is_large_int<large_int<A>> : std::true_type {};

template <class T, class ReturnType>
using if_arithmetic_or_large_int =
    std::enable_if_t<(std::is_arithmetic<T>::value || is_large_int<T>::value),
                     ReturnType>;

template <class L, class T>
using next_type = mp11::mp_at_c<L, (mp11::mp_find<L, T>::value + 1)>;

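// RAII helper for buffer_create below: if constructing the elements throws, the
// destructor returns the allocated memory; release() disarms it once construction
// has succeeded.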
template <class Allocator>
class construct_guard {
public:
  using pointer = typename std::allocator_traits<Allocator>::pointer;

  construct_guard(Allocator& a, pointer p, std::size_t n) noexcept
      : a_(a), p_(p), n_(n) {}

  ~construct_guard() {
    if (p_) { a_.deallocate(p_, n_); }
  }

  void release() { p_ = pointer(); }

  construct_guard(const construct_guard&) = delete;
  construct_guard& operator=(const construct_guard&) = delete;

private:
  Allocator& a_;
  pointer p_;
  std::size_t n_;
};

template <class Allocator>
void* buffer_create(Allocator& a, std::size_t n) {
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  boost::alloc_construct_n(a, ptr, n);
  guard.release();
  return static_cast<void*>(ptr);
}

template <class Allocator, class Iterator>
auto buffer_create(Allocator& a, std::size_t n, Iterator iter) {
  BOOST_ASSERT(n > 0u);
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  using T = typename std::allocator_traits<Allocator>::value_type;
  struct casting_iterator {
    void operator++() noexcept { ++iter_; }
    T operator*() noexcept {
      return static_cast<T>(*iter_);
    } // silence conversion warnings
    Iterator iter_;
  };
  boost::alloc_construct_n(a, ptr, n, casting_iterator{iter});
  guard.release();
  return ptr;
}

template <class Allocator>
void buffer_destroy(Allocator& a, typename std::allocator_traits<Allocator>::pointer p,
                    std::size_t n) {
  BOOST_ASSERT(p);
  BOOST_ASSERT(n > 0u);
  boost::alloc_destroy_n(a, p, n);
  a.deallocate(p, n);
}

} // namespace detail

/**
  Memory-efficient storage for integral counters which cannot overflow.

  This storage provides a no-overflow-guarantee if the counters are incremented with
  integer weights. It maintains a contiguous array of elemental counters, one for each
  cell. If an operation is requested which would overflow a counter, the array is
  replaced with another of a wider integral type, then the operation is executed. The
  storage uses integers of 8, 16, 32, 64 bits, and then switches to a multiprecision
  integral type, similar to those in
  [Boost.Multiprecision](https://www.boost.org/doc/libs/develop/libs/multiprecision/doc/html/index.html).

  A scaling operation or adding a floating point number triggers a conversion of the
  elemental counters into doubles, which voids the no-overflow-guarantee.
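
  A minimal usage sketch (illustrative only; std::allocator<char> stands in for any
  allocator with a trivial pointer type, and within the library this storage is
  normally selected through the histogram factory functions rather than used
  directly):

  @code
    unlimited_storage<std::allocator<char>> s;
    s.reset(2);      // two cells, initially held as 8-bit counters
    ++s[0];          // integral increments promote the internal array as needed
    s[1] += 2;       // adding integer weights keeps the no-overflow-guarantee
    s *= 2.0;        // scaling converts the cells to double and voids the guarantee
    double v = s[0]; // cells convert to double when read
  @endcode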
*/
template <class Allocator>
class unlimited_storage {
  static_assert(
      std::is_same<typename std::allocator_traits<Allocator>::pointer,
                   typename std::allocator_traits<Allocator>::value_type*>::value,
      "unlimited_storage requires allocator with trivial pointer type");
  using U8 = std::uint8_t;
  using U16 = std::uint16_t;
  using U32 = std::uint32_t;
  using U64 = std::uint64_t;

public:
  static constexpr bool has_threading_support = false;

  using allocator_type = Allocator;
  using value_type = double;
  using large_int = detail::large_int<
      typename std::allocator_traits<allocator_type>::template rebind_alloc<U64>>;

  struct buffer_type {
    // cannot be moved outside of scope of unlimited_storage, large_int is dependent type
    using types = mp11::mp_list<U8, U16, U32, U64, large_int, double>;

    template <class T>
    static constexpr unsigned type_index() noexcept {
      return static_cast<unsigned>(mp11::mp_find<types, T>::value);
    }

    template <class F, class... Ts>
    decltype(auto) visit(F&& f, Ts&&... ts) const {
      // this is intentionally not a switch, the if-chain is faster in benchmarks
      if (type == type_index<U8>())
        return f(static_cast<U8*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U16>())
        return f(static_cast<U16*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U32>())
        return f(static_cast<U32*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U64>())
        return f(static_cast<U64*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<large_int>())
        return f(static_cast<large_int*>(ptr), std::forward<Ts>(ts)...);
      return f(static_cast<double*>(ptr), std::forward<Ts>(ts)...);
    }

    buffer_type(const allocator_type& a = {}) : alloc(a) {}

    buffer_type(buffer_type&& o) noexcept
        : alloc(std::move(o.alloc))
        , size(boost::exchange(o.size, 0))
        , type(boost::exchange(o.type, 0))
        , ptr(boost::exchange(o.ptr, nullptr)) {}

    buffer_type& operator=(buffer_type&& o) noexcept {
      using std::swap;
      swap(alloc, o.alloc);
      swap(size, o.size);
      swap(type, o.type);
      swap(ptr, o.ptr);
      return *this;
    }

    buffer_type(const buffer_type& x) : alloc(x.alloc) {
      x.visit([this, n = x.size](const auto* xp) {
        using T = std::decay_t<decltype(*xp)>;
        this->template make<T>(n, xp);
      });
    }

    buffer_type& operator=(const buffer_type& o) {
      *this = buffer_type(o);
      return *this;
    }

    ~buffer_type() noexcept { destroy(); }

    void destroy() noexcept {
      BOOST_ASSERT((ptr == nullptr) == (size == 0));
      if (ptr == nullptr) return;
      visit([this](auto* p) {
        using T = std::decay_t<decltype(*p)>;
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc); // rebind allocator
        detail::buffer_destroy(a, p, this->size);
      });
      size = 0;
      type = 0;
      ptr = nullptr;
    }

    template <class T>
    void make(std::size_t n) {
      // note: order of commands is to not leave buffer in invalid state upon throw
      destroy();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        ptr = detail::buffer_create(a, n); // may throw
      }
      size = n;
      type = type_index<T>();
    }

    template <class T, class U>
    void make(std::size_t n, U iter) {
      // note: iter may be current ptr, so create new buffer before deleting old buffer
      void* new_ptr = nullptr;
      const auto new_type = type_index<T>();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        new_ptr = detail::buffer_create(a, n, iter); // may throw
      }
      destroy();
      size = n;
      type = new_type;
      ptr = new_ptr;
    }

    allocator_type alloc;
    std::size_t size = 0;
    unsigned type = 0;
    mutable void* ptr = nullptr;
  };

  class reference; // forward declare to make friend of const_reference

  /// implementation detail
  class const_reference
      : detail::partially_ordered<const_reference, const_reference, void> {
  public:
    const_reference(buffer_type& b, std::size_t i) noexcept : bref_(b), idx_(i) {
      BOOST_ASSERT(idx_ < bref_.size);
    }

    const_reference(const const_reference&) noexcept = default;

    // no assignment for const_references
    const_reference& operator=(const const_reference&) = delete;
    const_reference& operator=(const_reference&&) = delete;

    operator double() const noexcept {
      return bref_.visit(
          [this](const auto* p) { return static_cast<double>(p[this->idx_]); });
    }

    bool operator<(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    bool operator==(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return apply_binary<detail::safe_greater>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

  private:
    template <class Binary>
    bool apply_binary(const const_reference& x) const noexcept {
      return x.bref_.visit([this, ix = x.idx_](const auto* xp) {
        return this->apply_binary<Binary>(xp[ix]);
      });
    }

    template <class Binary, class U>
    bool apply_binary(const U& x) const noexcept {
      return bref_.visit([i = idx_, &x](const auto* p) { return Binary()(p[i], x); });
    }

  protected:
    buffer_type& bref_;
    std::size_t idx_;
    friend class reference;
  };

  /// implementation detail
  class reference : public const_reference,
                    public detail::partially_ordered<reference, reference, void> {
  public:
    reference(buffer_type& b, std::size_t i) noexcept : const_reference(b, i) {}

    // references do copy-construct
    reference(const reference& x) noexcept = default;

    // references do not rebind, assign through
    reference& operator=(const reference& x) {
      return operator=(static_cast<const_reference>(x));
    }

    // references do not rebind, assign through
    reference& operator=(const const_reference& x) {
      // safe for self-assignment, assigning matching type doesn't invalidate buffer
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator=(const U& x) {
      this->bref_.visit([this, &x](auto* p) {
        // gcc-8 optimizes the expression `p[this->idx_] = 0` away even at -O0,
        // so we merge it into the next line which is properly counted
        adder()((p[this->idx_] = 0, p), this->bref_, this->idx_, x);
      });
      return *this;
    }

    bool operator<(const reference& o) const noexcept {
      return const_reference::operator<(o);
    }

    bool operator==(const reference& o) const noexcept {
      return const_reference::operator==(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return const_reference::operator<(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return const_reference::operator>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return const_reference::operator==(o);
    }

    reference& operator+=(const const_reference& x) {
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator+=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator+=(const U& x) {
      this->bref_.visit(adder(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator-=(const double x) { return operator+=(-x); }

    reference& operator*=(const double x) {
      this->bref_.visit(multiplier(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator/=(const double x) { return operator*=(1.0 / x); }

    reference& operator++() {
      this->bref_.visit(incrementor(), this->bref_, this->idx_);
      return *this;
    }
  };

private:
  template <class Value, class Reference>
  class iterator_impl : public detail::iterator_adaptor<iterator_impl<Value, Reference>,
                                                        std::size_t, Reference, Value> {
  public:
    iterator_impl() = default;

    template <class V, class R>
    iterator_impl(const iterator_impl<V, R>& it)
        : iterator_impl::iterator_adaptor_(it.base()), buffer_(it.buffer_) {}

    iterator_impl(buffer_type* b, std::size_t i) noexcept
        : iterator_impl::iterator_adaptor_(i), buffer_(b) {}

    Reference operator*() const noexcept { return {*buffer_, this->base()}; }

    template <class V, class R>
    friend class iterator_impl;

  private:
    mutable buffer_type* buffer_ = nullptr;
  };

public:
  using const_iterator = iterator_impl<const value_type, const_reference>;
  using iterator = iterator_impl<value_type, reference>;

  explicit unlimited_storage(const allocator_type& a = {}) : buffer_(a) {}
  unlimited_storage(const unlimited_storage&) = default;
  unlimited_storage& operator=(const unlimited_storage&) = default;
  unlimited_storage(unlimited_storage&&) = default;
  unlimited_storage& operator=(unlimited_storage&&) = default;

  // TODO
  // template <class Allocator>
  // unlimited_storage(const unlimited_storage<Allocator>& s)

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  explicit unlimited_storage(const Iterable& s) {
    using std::begin;
    using std::end;
    auto s_begin = begin(s);
    auto s_end = end(s);
    using V = typename std::iterator_traits<decltype(begin(s))>::value_type;
    constexpr auto ti = buffer_type::template type_index<V>();
    constexpr auto nt = mp11::mp_size<typename buffer_type::types>::value;
    const std::size_t size = static_cast<std::size_t>(std::distance(s_begin, s_end));
#ifdef BOOST_NO_CXX17_IF_CONSTEXPR
    if
#else
    if constexpr
#endif
        (ti < nt)
      buffer_.template make<V>(size, s_begin);
    else
      buffer_.template make<double>(size, s_begin);
  }

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  unlimited_storage& operator=(const Iterable& s) {
    *this = unlimited_storage(s);
    return *this;
  }

  allocator_type get_allocator() const { return buffer_.alloc; }

  void reset(std::size_t n) { buffer_.template make<U8>(n); }

  std::size_t size() const noexcept { return buffer_.size; }

  reference operator[](std::size_t i) noexcept { return {buffer_, i}; }
  const_reference operator[](std::size_t i) const noexcept { return {buffer_, i}; }

  bool operator==(const unlimited_storage& x) const noexcept {
    if (size() != x.size()) return false;
    return buffer_.visit([&x](const auto* p) {
      return x.buffer_.visit([p, n = x.size()](const auto* xp) {
        return std::equal(p, p + n, xp, detail::safe_equal{});
      });
    });
  }

  template <class Iterable>
  bool operator==(const Iterable& iterable) const {
    if (size() != iterable.size()) return false;
    return buffer_.visit([&iterable](const auto* p) {
      return std::equal(p, p + iterable.size(), std::begin(iterable),
                        detail::safe_equal{});
    });
  }

  unlimited_storage& operator*=(const double x) {
    buffer_.visit(multiplier(), buffer_, x);
    return *this;
  }

  iterator begin() noexcept { return {&buffer_, 0}; }
  iterator end() noexcept { return {&buffer_, size()}; }
  const_iterator begin() const noexcept { return {&buffer_, 0}; }
  const_iterator end() const noexcept { return {&buffer_, size()}; }

  /// implementation detail; used by unit tests, not part of generic storage interface
  template <class T>
  unlimited_storage(std::size_t s, const T* p, const allocator_type& a = {})
      : buffer_(std::move(a)) {
    buffer_.template make<T>(s, p);
  }

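  // Serialization: on load, the buffer is first re-created from the stored type
  // index and size, then the cell values are read back through the array wrapper.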
  template <class Archive>
  void serialize(Archive& ar, unsigned /* version */) {
    if (Archive::is_loading::value) {
      buffer_type tmp(buffer_.alloc);
      std::size_t size;
      ar& make_nvp("type", tmp.type);
      ar& make_nvp("size", size);
      tmp.visit([this, size](auto* tp) {
        BOOST_ASSERT(tp == nullptr);
        using T = std::decay_t<decltype(*tp)>;
        buffer_.template make<T>(size);
      });
    } else {
      ar& make_nvp("type", buffer_.type);
      ar& make_nvp("size", buffer_.size);
    }
    buffer_.visit([this, &ar](auto* tp) {
      auto w = detail::make_array_wrapper(tp, this->buffer_.size);
      ar& make_nvp("buffer", w);
    });
  }

private:
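  // The visitor structs below implement the in-place operations. When an operation
  // would overflow the current cell type, they replace the whole buffer with the
  // next wider type from buffer_type::types and retry; incrementing a uint8_t cell
  // that already holds 255, for example, first re-creates the buffer as uint16_t.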
  struct incrementor {
    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i) {
      BOOST_ASSERT(tp && i < b.size);
      if (!detail::safe_increment(tp[i])) {
        using U = detail::next_type<typename buffer_type::types, T>;
        b.template make<U>(b.size, tp);
        ++static_cast<U*>(b.ptr)[i];
      }
    }

    void operator()(large_int* tp, buffer_type&, std::size_t i) { ++tp[i]; }

    void operator()(double* tp, buffer_type&, std::size_t i) { ++tp[i]; }
  };

  struct adder {
    template <class U>
    void operator()(double* tp, buffer_type&, std::size_t i, const U& x) {
      tp[i] += static_cast<double>(x);
    }

    void operator()(large_int* tp, buffer_type&, std::size_t i, const large_int& x) {
      tp[i] += x; // potentially adding large_int to itself is safe
    }

    template <class T, class U>
    void operator()(T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_integral(std::is_integral<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<double>(x);
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, v);
    }

    template <class T>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const large_int& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<large_int>(x);
      b.template make<large_int>(b.size, tp);
      operator()(static_cast<large_int*>(b.ptr), b, i, v);
    }

    template <class T, class U>
    void is_x_integral(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_unsigned(std::is_unsigned<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_unsigned(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      if (x >= 0)
        is_x_unsigned(std::true_type{}, tp, b, i, detail::make_unsigned(x));
      else
        is_x_integral(std::false_type{}, tp, b, i, static_cast<double>(x));
    }

    template <class T, class U>
    void is_x_unsigned(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      if (detail::safe_radd(tp[i], x)) return;
      // x could be reference to buffer we manipulate, need to convert to value
      const auto y = x;
      using TN = detail::next_type<typename buffer_type::types, T>;
      b.template make<TN>(b.size, tp);
      is_x_unsigned(std::true_type{}, static_cast<TN*>(b.ptr), b, i, y);
    }

    template <class U>
    void is_x_unsigned(std::true_type, large_int* tp, buffer_type&, std::size_t i,
                       const U& x) {
      tp[i] += x;
    }
  };

  struct multiplier {
    template <class T>
    void operator()(T* tp, buffer_type& b, const double x) {
      // potential lossy conversion that cannot be avoided
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, x);
    }

    void operator()(double* tp, buffer_type& b, const double x) {
      for (auto end = tp + b.size; tp != end; ++tp) *tp *= x;
    }

    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i, const double x) {
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, x);
    }

    void operator()(double* tp, buffer_type&, std::size_t i, const double x) {
      tp[i] *= static_cast<double>(x);
    }
  };

  mutable buffer_type buffer_;
  friend struct unsafe_access;
};

} // namespace histogram
} // namespace boost

#endif