// Copyright 2015-2019 Hans Dembinski
// Copyright 2019 Glen Joseph Fernandes (glenjofe@gmail.com)
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP
#define BOOST_HISTOGRAM_UNLIMITED_STORAGE_HPP

#include <algorithm>
#include <boost/core/alloc_construct.hpp>
#include <boost/core/exchange.hpp>
#include <boost/core/nvp.hpp>
#include <boost/histogram/detail/array_wrapper.hpp>
#include <boost/histogram/detail/iterator_adaptor.hpp>
#include <boost/histogram/detail/large_int.hpp>
#include <boost/histogram/detail/operators.hpp>
#include <boost/histogram/detail/safe_comparison.hpp>
#include <boost/histogram/fwd.hpp>
#include <boost/mp11/algorithm.hpp>
#include <boost/mp11/list.hpp>
#include <boost/mp11/utility.hpp>
#include <cassert>
#include <cmath>
#include <cstdint>
#include <functional>
#include <iterator>
#include <memory>
#include <type_traits>

namespace boost {
namespace histogram {
namespace detail {

template <class T>
struct is_large_int : std::false_type {};

template <class A>
struct is_large_int<large_int<A>> : std::true_type {};

template <class T, class ReturnType>
using if_arithmetic_or_large_int =
    std::enable_if_t<(std::is_arithmetic<T>::value || is_large_int<T>::value),
                     ReturnType>;

template <class L, class T>
using next_type = mp11::mp_at_c<L, (mp11::mp_find<L, T>::value + 1)>;

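// RAII guard used during buffer construction: if constructing the elements throws,
// the guard deallocates the memory; call release() after successful construction.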
template <class Allocator>
class construct_guard {
public:
  using pointer = typename std::allocator_traits<Allocator>::pointer;

  construct_guard(Allocator& a, pointer p, std::size_t n) noexcept
      : a_(a), p_(p), n_(n) {}

  ~construct_guard() {
    if (p_) { a_.deallocate(p_, n_); }
  }

  void release() { p_ = pointer(); }

  construct_guard(const construct_guard&) = delete;
  construct_guard& operator=(const construct_guard&) = delete;

private:
  Allocator& a_;
  pointer p_;
  std::size_t n_;
};

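// Allocates an array of n elements and constructs them via the allocator
// (zero-initialized counters for the default allocator); the guard releases
// the memory if construction throws.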
template <class Allocator>
void* buffer_create(Allocator& a, std::size_t n) {
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  boost::alloc_construct_n(a, ptr, n);
  guard.release();
  return static_cast<void*>(ptr);
}

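// Allocates an array of n elements and fills it from the iterator range, casting
// each source value to the destination element type.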
template <class Allocator, class Iterator>
auto buffer_create(Allocator& a, std::size_t n, Iterator iter) {
  assert(n > 0u);
  auto ptr = a.allocate(n); // may throw
  static_assert(std::is_trivially_copyable<decltype(ptr)>::value,
                "ptr must be trivially copyable");
  construct_guard<Allocator> guard(a, ptr, n);
  using T = typename std::allocator_traits<Allocator>::value_type;
  struct casting_iterator {
    void operator++() noexcept { ++iter_; }
    T operator*() noexcept {
      return static_cast<T>(*iter_);
    } // silence conversion warnings
    Iterator iter_;
  };
  boost::alloc_construct_n(a, ptr, n, casting_iterator{iter});
  guard.release();
  return ptr;
}

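// Destroys the n elements and returns the memory to the allocator.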
template <class Allocator>
void buffer_destroy(Allocator& a, typename std::allocator_traits<Allocator>::pointer p,
                    std::size_t n) {
  assert(p);
  assert(n > 0u);
  boost::alloc_destroy_n(a, p, n);
  a.deallocate(p, n);
}

} // namespace detail

/**
  Memory-efficient storage for integral counters which cannot overflow.

  This storage provides a no-overflow-guarantee if the counters are incremented with
  integer weights. It maintains a contiguous array of elemental counters, one for each
  cell. If an operation is requested which would overflow a counter, the array is
  replaced with another of a wider integral type, then the operation is executed. The
  storage uses integers of 8, 16, 32, 64 bits, and then switches to a multiprecision
  integral type, similar to those in
  [Boost.Multiprecision](https://www.boost.org/doc/libs/develop/libs/multiprecision/doc/html/index.html).

  A scaling operation or adding a floating point number triggers a conversion of the
  elemental counters into doubles, which voids the no-overflow-guarantee.
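
  A brief usage sketch (illustrative only, not part of the library documentation; it
  assumes the default allocator declared in fwd.hpp):

  ```cpp
  boost::histogram::unlimited_storage<> s;
  s.reset(3);   // 3 cells, initially stored as 8-bit unsigned integers
  ++s[0];       // increments grow the element type only when a counter would overflow
  s[1] += 1000; // integral addition switches to a wider unsigned integer type
  s[2] *= 0.5;  // scaling converts all counters to double
  ```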
*/
template <class Allocator>
class unlimited_storage {
  static_assert(
      std::is_same<typename std::allocator_traits<Allocator>::pointer,
                   typename std::allocator_traits<Allocator>::value_type*>::value,
      "unlimited_storage requires allocator with trivial pointer type");
  using U8 = std::uint8_t;
  using U16 = std::uint16_t;
  using U32 = std::uint32_t;
  using U64 = std::uint64_t;

public:
  static constexpr bool has_threading_support = false;

  using allocator_type = Allocator;
  using value_type = double;
  using large_int = detail::large_int<
      typename std::allocator_traits<allocator_type>::template rebind_alloc<U64>>;

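  // Type-erased buffer: holds a pointer to an array of one of the `types` below,
  // together with the number of elements and a runtime index of the active type.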
  struct buffer_type {
    // cannot be moved outside of scope of unlimited_storage, large_int is dependent type
    using types = mp11::mp_list<U8, U16, U32, U64, large_int, double>;

    template <class T>
    static constexpr unsigned type_index() noexcept {
      return static_cast<unsigned>(mp11::mp_find<types, T>::value);
    }

    template <class F, class... Ts>
    decltype(auto) visit(F&& f, Ts&&... ts) const {
      // this is intentionally not a switch, the if-chain is faster in benchmarks
      if (type == type_index<U8>())
        return f(static_cast<U8*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U16>())
        return f(static_cast<U16*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U32>())
        return f(static_cast<U32*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<U64>())
        return f(static_cast<U64*>(ptr), std::forward<Ts>(ts)...);
      if (type == type_index<large_int>())
        return f(static_cast<large_int*>(ptr), std::forward<Ts>(ts)...);
      return f(static_cast<double*>(ptr), std::forward<Ts>(ts)...);
    }

    buffer_type(const allocator_type& a = {}) : alloc(a) {}

    buffer_type(buffer_type&& o) noexcept
        : alloc(std::move(o.alloc))
        , size(boost::exchange(o.size, 0))
        , type(boost::exchange(o.type, 0))
        , ptr(boost::exchange(o.ptr, nullptr)) {}

    buffer_type& operator=(buffer_type&& o) noexcept {
      using std::swap;
      swap(alloc, o.alloc);
      swap(size, o.size);
      swap(type, o.type);
      swap(ptr, o.ptr);
      return *this;
    }

    buffer_type(const buffer_type& x) : alloc(x.alloc) {
      x.visit([this, n = x.size](const auto* xp) {
        using T = std::decay_t<decltype(*xp)>;
        this->template make<T>(n, xp);
      });
    }

    buffer_type& operator=(const buffer_type& o) {
      *this = buffer_type(o);
      return *this;
    }

    ~buffer_type() noexcept { destroy(); }

    void destroy() noexcept {
      assert((ptr == nullptr) == (size == 0));
      if (ptr == nullptr) return;
      visit([this](auto* p) {
        using T = std::decay_t<decltype(*p)>;
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc); // rebind allocator
        detail::buffer_destroy(a, p, this->size);
      });
      size = 0;
      type = 0;
      ptr = nullptr;
    }

    template <class T>
    void make(std::size_t n) {
      // note: order of commands is to not leave buffer in invalid state upon throw
      destroy();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        ptr = detail::buffer_create(a, n); // may throw
      }
      size = n;
      type = type_index<T>();
    }

    template <class T, class U>
    void make(std::size_t n, U iter) {
      // note: iter may be current ptr, so create new buffer before deleting old buffer
      void* new_ptr = nullptr;
      const auto new_type = type_index<T>();
      if (n > 0) {
        // rebind allocator
        using alloc_type =
            typename std::allocator_traits<allocator_type>::template rebind_alloc<T>;
        alloc_type a(alloc);
        new_ptr = detail::buffer_create(a, n, iter); // may throw
      }
      destroy();
      size = n;
      type = new_type;
      ptr = new_ptr;
    }

    allocator_type alloc;
    std::size_t size = 0;
    unsigned type = 0;
    mutable void* ptr = nullptr;
  };

  class reference; // forward declare to make friend of const_reference

  /// implementation detail
  class const_reference
      : detail::partially_ordered<const_reference, const_reference, void> {
  public:
    const_reference(buffer_type& b, std::size_t i) noexcept : bref_(b), idx_(i) {
      assert(idx_ < bref_.size);
    }

    const_reference(const const_reference&) noexcept = default;

    // no assignment for const_references
    const_reference& operator=(const const_reference&) = delete;
    const_reference& operator=(const_reference&&) = delete;

    operator double() const noexcept {
      return bref_.visit(
          [this](const auto* p) { return static_cast<double>(p[this->idx_]); });
    }

    bool operator<(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    bool operator==(const const_reference& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return apply_binary<detail::safe_less>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return apply_binary<detail::safe_greater>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return apply_binary<detail::safe_equal>(o);
    }

  private:
    template <class Binary>
    bool apply_binary(const const_reference& x) const noexcept {
      return x.bref_.visit([this, ix = x.idx_](const auto* xp) {
        return this->apply_binary<Binary>(xp[ix]);
      });
    }

    template <class Binary, class U>
    bool apply_binary(const U& x) const noexcept {
      return bref_.visit([i = idx_, &x](const auto* p) { return Binary()(p[i], x); });
    }

  protected:
    buffer_type& bref_;
    std::size_t idx_;
    friend class reference;
  };

  /// implementation detail
  class reference : public const_reference,
                    public detail::partially_ordered<reference, reference, void> {
  public:
    reference(buffer_type& b, std::size_t i) noexcept : const_reference(b, i) {}

    // references do copy-construct
    reference(const reference& x) noexcept = default;

    // references do not rebind, assign through
    reference& operator=(const reference& x) {
      return operator=(static_cast<const_reference>(x));
    }

    // references do not rebind, assign through
    reference& operator=(const const_reference& x) {
      // safe for self-assignment, assigning matching type doesn't invalidate buffer
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator=(const U& x) {
      this->bref_.visit([this, &x](auto* p) {
        // gcc-8 optimizes the expression `p[this->idx_] = 0` away even at -O0,
        // so we merge it into the next line which is properly counted
        adder()((p[this->idx_] = 0, p), this->bref_, this->idx_, x);
      });
      return *this;
    }

    bool operator<(const reference& o) const noexcept {
      return const_reference::operator<(o);
    }

    bool operator==(const reference& o) const noexcept {
      return const_reference::operator==(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator<(const U& o) const noexcept {
      return const_reference::operator<(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator>(const U& o) const noexcept {
      return const_reference::operator>(o);
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, bool> operator==(const U& o) const noexcept {
      return const_reference::operator==(o);
    }

    reference& operator+=(const const_reference& x) {
      x.bref_.visit([this, ix = x.idx_](const auto* xp) { this->operator+=(xp[ix]); });
      return *this;
    }

    template <class U>
    detail::if_arithmetic_or_large_int<U, reference&> operator+=(const U& x) {
      this->bref_.visit(adder(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator-=(const double x) { return operator+=(-x); }

    reference& operator*=(const double x) {
      this->bref_.visit(multiplier(), this->bref_, this->idx_, x);
      return *this;
    }

    reference& operator/=(const double x) { return operator*=(1.0 / x); }

    reference& operator++() {
      this->bref_.visit(incrementor(), this->bref_, this->idx_);
      return *this;
    }
  };

private:
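  // Iterator over the cells; dereferencing yields a reference/const_reference proxy
  // into the type-erased buffer.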
  template <class Value, class Reference>
  class iterator_impl : public detail::iterator_adaptor<iterator_impl<Value, Reference>,
                                                        std::size_t, Reference, Value> {
  public:
    iterator_impl() = default;
    template <class V, class R>
    iterator_impl(const iterator_impl<V, R>& it)
        : iterator_impl::iterator_adaptor_(it.base()), buffer_(it.buffer_) {}
    iterator_impl(buffer_type* b, std::size_t i) noexcept
        : iterator_impl::iterator_adaptor_(i), buffer_(b) {}

    Reference operator*() const noexcept { return {*buffer_, this->base()}; }

    template <class V, class R>
    friend class iterator_impl;

  private:
    mutable buffer_type* buffer_ = nullptr;
  };

public:
  using const_iterator = iterator_impl<const value_type, const_reference>;
  using iterator = iterator_impl<value_type, reference>;

  explicit unlimited_storage(const allocator_type& a = {}) : buffer_(a) {}
  unlimited_storage(const unlimited_storage&) = default;
  unlimited_storage& operator=(const unlimited_storage&) = default;
  unlimited_storage(unlimited_storage&&) = default;
  unlimited_storage& operator=(unlimited_storage&&) = default;

  // TODO
  // template <class Allocator>
  // unlimited_storage(const unlimited_storage<Allocator>& s)

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  explicit unlimited_storage(const Iterable& s) {
    using std::begin;
    using std::end;
    auto s_begin = begin(s);
    auto s_end = end(s);
    using V = typename std::iterator_traits<decltype(begin(s))>::value_type;
    // must be non-const to avoid msvc warning about if constexpr
    auto ti = buffer_type::template type_index<V>();
    auto nt = mp11::mp_size<typename buffer_type::types>::value;
    const std::size_t size = static_cast<std::size_t>(std::distance(s_begin, s_end));
    if (ti < nt)
      buffer_.template make<V>(size, s_begin);
    else
      buffer_.template make<double>(size, s_begin);
  }

  template <class Iterable, class = detail::requires_iterable<Iterable>>
  unlimited_storage& operator=(const Iterable& s) {
    *this = unlimited_storage(s);
    return *this;
  }

  allocator_type get_allocator() const { return buffer_.alloc; }

  void reset(std::size_t n) { buffer_.template make<U8>(n); }

  std::size_t size() const noexcept { return buffer_.size; }

  reference operator[](std::size_t i) noexcept { return {buffer_, i}; }
  const_reference operator[](std::size_t i) const noexcept { return {buffer_, i}; }

  bool operator==(const unlimited_storage& x) const noexcept {
    if (size() != x.size()) return false;
    return buffer_.visit([&x](const auto* p) {
      return x.buffer_.visit([p, n = x.size()](const auto* xp) {
        return std::equal(p, p + n, xp, detail::safe_equal{});
      });
    });
  }

  template <class Iterable>
  bool operator==(const Iterable& iterable) const {
    if (size() != iterable.size()) return false;
    return buffer_.visit([&iterable](const auto* p) {
      return std::equal(p, p + iterable.size(), std::begin(iterable),
                        detail::safe_equal{});
    });
  }

  unlimited_storage& operator*=(const double x) {
    buffer_.visit(multiplier(), buffer_, x);
    return *this;
  }

  iterator begin() noexcept { return {&buffer_, 0}; }
  iterator end() noexcept { return {&buffer_, size()}; }
  const_iterator begin() const noexcept { return {&buffer_, 0}; }
  const_iterator end() const noexcept { return {&buffer_, size()}; }

  /// implementation detail; used by unit tests, not part of generic storage interface
  template <class T>
  unlimited_storage(std::size_t s, const T* p, const allocator_type& a = {})
      : buffer_(std::move(a)) {
    buffer_.template make<T>(s, p);
  }

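  // Serialization: the runtime type index and the size are written/read first, then
  // the buffer contents are (de)serialized with the matching element type.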
  template <class Archive>
  void serialize(Archive& ar, unsigned /* version */) {
    if (Archive::is_loading::value) {
      buffer_type tmp(buffer_.alloc);
      std::size_t size;
      ar& make_nvp("type", tmp.type);
      ar& make_nvp("size", size);
      tmp.visit([this, size](auto* tp) {
        assert(tp == nullptr);
        using T = std::decay_t<decltype(*tp)>;
        buffer_.template make<T>(size);
      });
    } else {
      ar& make_nvp("type", buffer_.type);
      ar& make_nvp("size", buffer_.size);
    }
    buffer_.visit([this, &ar](auto* tp) {
      auto w = detail::make_array_wrapper(tp, this->buffer_.size);
      ar& make_nvp("buffer", w);
    });
  }

private:
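  // Visitor for operator++: increments cell i; if the current integer type would
  // overflow, the buffer is grown to the next wider type first.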
  struct incrementor {
    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i) {
      assert(tp && i < b.size);
      if (!detail::safe_increment(tp[i])) {
        using U = detail::next_type<typename buffer_type::types, T>;
        b.template make<U>(b.size, tp);
        ++static_cast<U*>(b.ptr)[i];
      }
    }

    void operator()(large_int* tp, buffer_type&, std::size_t i) { ++tp[i]; }

    void operator()(double* tp, buffer_type&, std::size_t i) { ++tp[i]; }
  };

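  // Visitor for operator+= and assignment: adds x to cell i, converting the buffer to
  // a wider integer, large_int, or double as needed so integer counts cannot overflow.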
  struct adder {
    template <class U>
    void operator()(double* tp, buffer_type&, std::size_t i, const U& x) {
      tp[i] += static_cast<double>(x);
    }

    void operator()(large_int* tp, buffer_type&, std::size_t i, const large_int& x) {
      tp[i] += x; // potentially adding large_int to itself is safe
    }

    template <class T, class U>
    void operator()(T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_integral(std::is_integral<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<double>(x);
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, v);
    }

    template <class T>
    void is_x_integral(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const large_int& x) {
      // x could be reference to buffer we manipulate, make copy before changing buffer
      const auto v = static_cast<large_int>(x);
      b.template make<large_int>(b.size, tp);
      operator()(static_cast<large_int*>(b.ptr), b, i, v);
    }

    template <class T, class U>
    void is_x_integral(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      is_x_unsigned(std::is_unsigned<U>{}, tp, b, i, x);
    }

    template <class T, class U>
    void is_x_unsigned(std::false_type, T* tp, buffer_type& b, std::size_t i,
                       const U& x) {
      if (x >= 0)
        is_x_unsigned(std::true_type{}, tp, b, i, detail::make_unsigned(x));
      else
        is_x_integral(std::false_type{}, tp, b, i, static_cast<double>(x));
    }

    template <class T, class U>
    void is_x_unsigned(std::true_type, T* tp, buffer_type& b, std::size_t i, const U& x) {
      if (detail::safe_radd(tp[i], x)) return;
      // x could be reference to buffer we manipulate, need to convert to value
      const auto y = x;
      using TN = detail::next_type<typename buffer_type::types, T>;
      b.template make<TN>(b.size, tp);
      is_x_unsigned(std::true_type{}, static_cast<TN*>(b.ptr), b, i, y);
    }

    template <class U>
    void is_x_unsigned(std::true_type, large_int* tp, buffer_type&, std::size_t i,
                       const U& x) {
      tp[i] += x;
    }
  };

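  // Visitor for operator*=: multiplication is potentially lossy, so the buffer is
  // converted to double before scaling a single cell or the whole array.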
  struct multiplier {
    template <class T>
    void operator()(T* tp, buffer_type& b, const double x) {
      // potential lossy conversion that cannot be avoided
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, x);
    }

    void operator()(double* tp, buffer_type& b, const double x) {
      for (auto end = tp + b.size; tp != end; ++tp) *tp *= x;
    }

    template <class T>
    void operator()(T* tp, buffer_type& b, std::size_t i, const double x) {
      b.template make<double>(b.size, tp);
      operator()(static_cast<double*>(b.ptr), b, i, x);
    }

    void operator()(double* tp, buffer_type&, std::size_t i, const double x) {
      tp[i] *= static_cast<double>(x);
    }
  };

  mutable buffer_type buffer_;
  friend struct unsafe_access;
};

} // namespace histogram
} // namespace boost

#endif