// auto_buffer.hpp (boost/signals2/detail) — extraction artifacts
// (file-size banner and concatenated line-number runs) removed.
  1. // Copyright Thorsten Ottosen, 2009.
  2. // Distributed under the Boost Software License, Version 1.0. (See
  3. // accompanying file LICENSE_1_0.txt or copy at
  4. // http://www.boost.org/LICENSE_1_0.txt)
  5. #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  6. #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  7. #include <boost/detail/workaround.hpp>
  8. #if defined(_MSC_VER)
  9. # pragma once
  10. #endif
  11. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  12. #pragma warning(push)
  13. #pragma warning(disable:4996)
  14. #endif
  15. #include <boost/assert.hpp>
  16. #include <boost/iterator/reverse_iterator.hpp>
  17. #include <boost/iterator/iterator_traits.hpp>
  18. #include <boost/mpl/if.hpp>
  19. #include <boost/signals2/detail/scope_guard.hpp>
  20. #include <boost/swap.hpp>
  21. #include <boost/type_traits/aligned_storage.hpp>
  22. #include <boost/type_traits/alignment_of.hpp>
  23. #include <boost/type_traits/has_nothrow_copy.hpp>
  24. #include <boost/type_traits/has_nothrow_assign.hpp>
  25. #include <boost/type_traits/has_trivial_assign.hpp>
  26. #include <boost/type_traits/has_trivial_constructor.hpp>
  27. #include <boost/type_traits/has_trivial_destructor.hpp>
  28. #include <algorithm>
  29. #include <cstring>
  30. #include <iterator>
  31. #include <memory>
  32. #include <stdexcept>
  33. namespace boost
  34. {
  35. namespace signals2
  36. {
  37. namespace detail
  38. {
  39. //
  40. // Policies for creating the stack buffer.
  41. //
  42. template< unsigned N >
  43. struct store_n_objects
  44. {
  45. BOOST_STATIC_CONSTANT( unsigned, value = N );
  46. };
  47. template< unsigned N >
  48. struct store_n_bytes
  49. {
  50. BOOST_STATIC_CONSTANT( unsigned, value = N );
  51. };
  52. namespace auto_buffer_detail
  53. {
  54. template< class Policy, class T >
  55. struct compute_buffer_size
  56. {
  57. BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
  58. };
  59. template< unsigned N, class T >
  60. struct compute_buffer_size< store_n_bytes<N>, T >
  61. {
  62. BOOST_STATIC_CONSTANT( unsigned, value = N );
  63. };
  64. template< class Policy, class T >
  65. struct compute_buffer_objects
  66. {
  67. BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
  68. };
  69. template< unsigned N, class T >
  70. struct compute_buffer_objects< store_n_bytes<N>, T >
  71. {
  72. BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
  73. };
  74. }
  75. struct default_grow_policy
  76. {
  77. template< class SizeType >
  78. static SizeType new_capacity( SizeType capacity )
  79. {
  80. //
  81. // @remark: we grow the capacity quite agressively.
  82. // this is justified since we aim to minimize
  83. // heap-allocations, and because we mostly use
  84. // the buffer locally.
  85. return capacity * 4u;
  86. }
  87. template< class SizeType >
  88. static bool should_shrink( SizeType, SizeType )
  89. {
  90. //
  91. // @remark: when defining a new grow policy, one might
  92. // choose that if the waated space is less
  93. // than a certain percentage, then it is of
  94. // little use to shrink.
  95. //
  96. return true;
  97. }
  98. };
// Forward declaration carrying the default template arguments:
// a 256-object stack buffer, 4x growth, and std::allocator.
template< class T,
          class StackBufferPolicy = store_n_objects<256>,
          class GrowPolicy        = default_grow_policy,
          class Allocator         = std::allocator<T> >
class auto_buffer;
//
// auto_buffer: a vector-like sequence that stores up to N elements in an
// inline (stack) buffer and only falls back to the allocator when the
// capacity exceeds N. Derives privately from Allocator to enable the
// empty-base optimization for stateless allocators.
//
template
<
    class T,
    class StackBufferPolicy,
    class GrowPolicy,
    class Allocator
>
class auto_buffer : Allocator
{
private:
    // Number of T objects the inline stack buffer can hold.
    enum { N = auto_buffer_detail::
               compute_buffer_objects<StackBufferPolicy,T>::value };

    BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

    // A buffer type with no stack storage (always heap-allocated);
    // conceptually the type of temporaries used by one_sided_swap().
    typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
                                                              local_buffer;

public:
    typedef Allocator                                allocator_type;
    typedef T                                        value_type;
    typedef typename Allocator::size_type            size_type;
    typedef typename Allocator::difference_type     difference_type;
    typedef T*                                       pointer;
    typedef typename Allocator::pointer              allocator_pointer;
    typedef const T*                                 const_pointer;
    typedef T&                                       reference;
    typedef const T&                                 const_reference;
    typedef pointer                                  iterator;
    typedef const_pointer                            const_iterator;
    typedef boost::reverse_iterator<iterator>        reverse_iterator;
    typedef boost::reverse_iterator<const_iterator>  const_reverse_iterator;
    // Pass small, trivially-assignable T by value; everything else by
    // const reference.
    typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                        && sizeof(T) <= sizeof(long double),
                                       const value_type,
                                       const_reference >::type
                                                     optimized_const_reference;
private:
    // Heap memory for capacities beyond N; otherwise the inline buffer.
    pointer allocate( size_type capacity_arg )
    {
        if( capacity_arg > N )
            return &*get_allocator().allocate( capacity_arg );
        else
            return static_cast<T*>( members_.address() );
    }

    // Release memory from allocate(); a no-op for the inline buffer.
    void deallocate( pointer where, size_type capacity_arg )
    {
        if( capacity_arg <= N )
            return;
        get_allocator().deallocate( allocator_pointer(where), capacity_arg );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
    {
        copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
    }

    // Contiguous source + trivially assignable T: raw memcpy.
    static void copy_rai( const T* begin, const T* end,
                          pointer where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    // General case: element-wise copy construction into raw storage.
    template< class I, bool b >
    static void copy_rai( I begin, I end,
                          pointer where, const boost::integral_constant<bool, b>& )
    {
        std::uninitialized_copy( begin, end, where );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
    {
        std::uninitialized_copy( begin, end, where );
    }

    // Copy-construct [begin,end) into raw storage at 'where',
    // dispatching on iterator category.
    template< class I >
    static void copy_impl( I begin, I end, pointer where )
    {
        copy_impl( begin, end, where,
                   typename std::iterator_traits<I>::iterator_category() );
    }

    // Assign [begin,end) over already-constructed elements at 'where'.
    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where )
    {
        assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
    {
        for( ; begin != end; ++begin, ++where )
            *where = *begin;
    }

    // Trivially assignable T: bulk value-initialize the new tail.
    void unchecked_push_back_n( size_type n, const boost::true_type& )
    {
        std::uninitialized_fill( end(), end() + n, T() );
        size_ += n;
    }

    void unchecked_push_back_n( size_type n, const boost::false_type& )
    {
        for( size_type i = 0u; i < n; ++i )
            unchecked_push_back();
    }

    void auto_buffer_destroy( pointer where, const boost::false_type& )
    {
        (*where).~T();
    }

    void auto_buffer_destroy( pointer, const boost::true_type& )
    { }

    // Destroy one element; no-op when T is trivially destructible.
    void auto_buffer_destroy( pointer where )
    {
        auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
    }

    // Destroy all elements and release the buffer.
    void auto_buffer_destroy()
    {
        BOOST_ASSERT( is_valid() );
        if( buffer_ ) // do we need this check? Yes, but only
                      // for N = 0u + local instances in one_sided_swap()
            auto_buffer_destroy( boost::has_trivial_destructor<T>() );
    }

    void destroy_back_n( size_type n, const boost::false_type& )
    {
        BOOST_ASSERT( n > 0 );
        pointer buffer  = buffer_ + size_ - 1u;
        pointer new_end = buffer - n;
        for( ; buffer > new_end; --buffer )
            auto_buffer_destroy( buffer );
    }

    void destroy_back_n( size_type, const boost::true_type& )
    { }

    // Destroy the last n elements; the caller updates size_.
    void destroy_back_n( size_type n )
    {
        destroy_back_n( n, boost::has_trivial_destructor<T>() );
    }

    void auto_buffer_destroy( const boost::false_type& x )
    {
        if( size_ )
            destroy_back_n( size_, x );
        deallocate( buffer_, members_.capacity_ );
    }

    void auto_buffer_destroy( const boost::true_type& )
    {
        deallocate( buffer_, members_.capacity_ );
    }

    // Copy the contents into a fresh buffer of new_capacity. The
    // scope_guard frees the new buffer if a copy constructor throws,
    // giving the strong guarantee.
    pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        scope_guard guard = make_obj_guard( *this,
                                            &auto_buffer::deallocate,
                                            new_buffer,
                                            new_capacity );
        copy_impl( begin(), end(), new_buffer ); // strong
        guard.dismiss();                         // nothrow
        return new_buffer;
    }

    // Nothrow-copyable T: no guard needed.
    pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        copy_impl( begin(), end(), new_buffer );       // nothrow
        return new_buffer;
    }

    // Reallocate to new_capacity, preserving the element sequence.
    void reserve_impl( size_type new_capacity )
    {
        pointer new_buffer = move_to_new_buffer( new_capacity,
                                                 boost::has_nothrow_copy<T>() );
        auto_buffer_destroy();
        buffer_            = new_buffer;
        members_.capacity_ = new_capacity;
        BOOST_ASSERT( size_ <= members_.capacity_ );
    }

    // Next capacity: at least n, and at least what the grow policy
    // proposes from the current capacity.
    size_type new_capacity_impl( size_type n )
    {
        BOOST_ASSERT( n > members_.capacity_ );
        size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
        // @todo: consider to check for allocator.max_size()
        return (std::max)(new_capacity,n);
    }

    // Both buffers on the stack, trivially assignable T: swap via a
    // temporary full copy.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::true_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

        auto_buffer temp( l.begin(), l.end() );
        assign_impl( r.begin(), r.end(), l.begin() );
        assign_impl( temp.begin(), temp.end(), r.begin() );
        boost::swap( l.size_, r.size_ );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // Both on the stack, general T: swap the common prefix element-wise,
    // then move the surplus from the larger into the smaller.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::false_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
        size_type min_size = (std::min)(l.size_,r.size_);
        size_type max_size = (std::max)(l.size_,r.size_);
        size_type diff     = max_size - min_size;
        auto_buffer* smallest = l.size_ == min_size ? &l : &r;
        auto_buffer* largest  = smallest == &l ? &r : &l;

        // @remark: the implementation below is not as fast
        //          as it could be if we assumed T had a default
        //          constructor.

        size_type i = 0u;
        for( ; i < min_size; ++i )
            boost::swap( (*smallest)[i], (*largest)[i] );

        for( ; i < max_size; ++i )
            smallest->unchecked_push_back( (*largest)[i] );

        largest->pop_back_n( diff );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // Steal temp's heap buffer and allocator (temp must be heap-backed).
    // Used to commit a rebuilt buffer in insert(). After the call temp
    // holds a null buffer so its destructor is a no-op.
    void one_sided_swap( auto_buffer& temp ) // nothrow
    {
        BOOST_ASSERT( !temp.is_on_stack() );
        auto_buffer_destroy();
        // @remark: must be nothrow
        get_allocator()    = temp.get_allocator();
        members_.capacity_ = temp.members_.capacity_;
        buffer_            = temp.buffer_;
        BOOST_ASSERT( temp.size_ >= size_ + 1u );
        size_              = temp.size_;
        temp.buffer_       = 0;
        BOOST_ASSERT( temp.is_valid() );
    }

    // Input iterators: single-pass source, so insert one at a time.
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::input_iterator_tag )
    {
        for( ; begin_arg != end_arg; ++begin_arg )
        {
            before = insert( before, *begin_arg );
            ++before;
        }
    }

    // Extend by n elements; trivially constructible T only bumps size_.
    void grow_back( size_type n, const boost::true_type& )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        size_ += n;
    }

    void grow_back( size_type n, const boost::false_type& )
    {
        unchecked_push_back_n(n);
    }

    void grow_back( size_type n )
    {
        grow_back( n, boost::has_trivial_constructor<T>() );
    }

    void grow_back_one( const boost::true_type& )
    {
        BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
        size_ += 1;
    }

    void grow_back_one( const boost::false_type& )
    {
        unchecked_push_back();
    }

    void grow_back_one()
    {
        grow_back_one( boost::has_trivial_constructor<T>() );
    }

    // Forward iterators: range size is known up front; insert in place
    // when capacity suffices, otherwise rebuild into a temporary and
    // commit via one_sided_swap().
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::forward_iterator_tag )
    {
        difference_type n = std::distance(begin_arg, end_arg);

        if( size_ + n <= members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            if( !is_back_insertion )
            {
                grow_back( n );
                iterator where = const_cast<T*>(before);
                // NOTE(review): this shifts the tail right with a forward
                // std::copy over ranges that can overlap — confirm;
                // std::copy_backward is the conventional tool here.
                std::copy( before, cend() - n, where + n );
                assign_impl( begin_arg, end_arg, where );
            }
            else
            {
                unchecked_push_back( begin_arg, end_arg );
            }
            BOOST_ASSERT( is_valid() );
            return;
        }

        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        temp.unchecked_push_back( begin_arg, end_arg );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

public:
    // Class invariant, exercised via BOOST_ASSERT throughout.
    bool is_valid() const // invariant
    {
        // @remark: allowed for N==0 and when
        //          using a locally instance
        //          in insert()/one_sided_swap()
        if( buffer_ == 0 )
            return true;

        if( members_.capacity_ < N )
            return false;

        if( !is_on_stack() && members_.capacity_ <= N )
            return false;

        if( buffer_ == members_.address() )
            if( members_.capacity_ > N )
                return false;

        if( size_ > members_.capacity_ )
            return false;

        return true;
    }

    // Empty buffer using the inline stack storage.
    auto_buffer()
        : members_( N ),
          buffer_( static_cast<T*>(members_.address()) ),
          size_( 0u )
    {
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( const auto_buffer& r )
        : members_( (std::max)(r.size_,size_type(N)) ),
          buffer_( allocate( members_.capacity_ ) ),
          size_( 0 )
    {
        copy_impl( r.begin(), r.end(), buffer_ );
        size_ = r.size_;
        BOOST_ASSERT( is_valid() );
    }

    // Copy assignment; provides the basic guarantee only.
    auto_buffer& operator=( const auto_buffer& r ) // basic
    {
        if( this == &r )
            return *this;

        difference_type diff = size_ - r.size_;
        if( diff >= 0 )
        {
            // Shrinking: destroy the surplus, assign over the rest.
            pop_back_n( static_cast<size_type>(diff) );
            assign_impl( r.begin(), r.end(), begin() );
        }
        else
        {
            if( members_.capacity_ >= r.size() )
            {
                // Growing within capacity: default-construct the extra
                // slots, then assign over everything.
                unchecked_push_back_n( static_cast<size_type>(-diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                // @remark: we release memory as early as possible
                //          since we only give the basic guarantee
                auto_buffer_destroy();
                buffer_ = 0;
                pointer new_buffer = allocate( r.size() );
                scope_guard guard = make_obj_guard( *this,
                                                    &auto_buffer::deallocate,
                                                    new_buffer,
                                                    r.size() );
                copy_impl( r.begin(), r.end(), new_buffer );
                guard.dismiss();
                buffer_            = new_buffer;
                members_.capacity_ = r.size();
                size_              = members_.capacity_;
            }
        }

        BOOST_ASSERT( size() == r.size() );
        BOOST_ASSERT( is_valid() );
        return *this;
    }

    // Empty buffer with at least capacity_arg capacity.
    explicit auto_buffer( size_type capacity_arg )
        : members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    // size_arg copies of init_value.
    auto_buffer( size_type size_arg, optimized_const_reference init_value )
        : members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( size_type capacity_arg, const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( size_type size_arg, optimized_const_reference init_value,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    // Copy of the range [begin_arg, end_arg).
    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
        :
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    ~auto_buffer()
    {
        auto_buffer_destroy();
    }

public:
    bool empty() const
    {
        return size_ == 0;
    }

    bool full() const
    {
        return size_ == members_.capacity_;
    }

    // True when the elements live in the inline stack buffer.
    bool is_on_stack() const
    {
        return members_.capacity_ <= N;
    }

    size_type size() const
    {
        return size_;
    }

    size_type capacity() const
    {
        return members_.capacity_;
    }

public:
    pointer data()
    {
        return buffer_;
    }

    const_pointer data() const
    {
        return buffer_;
    }

    // Allocator access via the private base class.
    allocator_type& get_allocator()
    {
        return static_cast<allocator_type&>(*this);
    }

    const allocator_type& get_allocator() const
    {
        return static_cast<const allocator_type&>(*this);
    }

public:
    iterator begin()
    {
        return buffer_;
    }

    const_iterator begin() const
    {
        return buffer_;
    }

    iterator end()
    {
        return buffer_ + size_;
    }

    const_iterator end() const
    {
        return buffer_ + size_;
    }

    reverse_iterator rbegin()
    {
        return reverse_iterator(end());
    }

    const_reverse_iterator rbegin() const
    {
        return const_reverse_iterator(end());
    }

    reverse_iterator rend()
    {
        return reverse_iterator(begin());
    }

    const_reverse_iterator rend() const
    {
        return const_reverse_iterator(begin());
    }

    const_iterator cbegin() const
    {
        return const_cast<const auto_buffer*>(this)->begin();
    }

    const_iterator cend() const
    {
        return const_cast<const auto_buffer*>(this)->end();
    }

    const_reverse_iterator crbegin() const
    {
        return const_cast<const auto_buffer*>(this)->rbegin();
    }

    const_reverse_iterator crend() const
    {
        return const_cast<const auto_buffer*>(this)->rend();
    }

public:
    reference front()
    {
        return buffer_[0];
    }

    optimized_const_reference front() const
    {
        return buffer_[0];
    }

    reference back()
    {
        return buffer_[size_-1];
    }

    optimized_const_reference back() const
    {
        return buffer_[size_-1];
    }

    reference operator[]( size_type n )
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    optimized_const_reference operator[]( size_type n ) const
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    // Append a default-constructed element; caller guarantees capacity.
    void unchecked_push_back()
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T;
        ++size_;
    }

    void unchecked_push_back_n( size_type n )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
    }

    void unchecked_push_back( optimized_const_reference x ) // non-growing
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T( x );
        ++size_;
    }

    template< class ForwardIterator >
    void unchecked_push_back( ForwardIterator begin_arg,
                              ForwardIterator end_arg ) // non-growing
    {
        BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
        copy_impl( begin_arg, end_arg, buffer_ + size_ );
        size_ += std::distance(begin_arg, end_arg);
    }

    // Grow capacity to exactly n (no policy over-allocation).
    void reserve_precisely( size_type n )
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;
        reserve_impl( n );
        BOOST_ASSERT( members_.capacity_ == n );
    }

    // Grow capacity to at least n, as directed by the grow policy.
    void reserve( size_type n ) // strong
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;

        reserve_impl( new_capacity_impl( n ) );
        BOOST_ASSERT( members_.capacity_ >= n );
    }

    void push_back()
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back();
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back();
        }
    }

    void push_back( optimized_const_reference x )
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back( x );
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back( x );
        }
    }

    template< class ForwardIterator >
    void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
    {
        difference_type diff = std::distance(begin_arg, end_arg);
        if( size_ + diff > members_.capacity_ )
            reserve( size_ + diff );
        unchecked_push_back( begin_arg, end_arg );
    }

    // Insert x before 'before'; returns an iterator to the new element.
    iterator insert( const_iterator before, optimized_const_reference x ) // basic
    {
        // @todo: consider if we want to support x in 'this'
        if( size_ < members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            iterator where = const_cast<T*>(before);

            if( !is_back_insertion )
            {
                grow_back_one();
                // NOTE(review): shifts the tail right with a forward
                // std::copy over possibly overlapping ranges — confirm;
                // std::copy_backward is the conventional tool here.
                std::copy( before, cend() - 1u, where + 1u );
                *where = x;
                BOOST_ASSERT( is_valid() );
            }
            else
            {
                unchecked_push_back( x );
            }
            return where;
        }

        // Out of capacity: rebuild into a temporary and commit.
        auto_buffer temp( new_capacity_impl( size_ + 1u ) );
        temp.unchecked_push_back( cbegin(), before );
        iterator result = temp.end();
        temp.unchecked_push_back( x );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
        return result;
    }

    // Insert n copies of x before 'before'.
    void insert( const_iterator before, size_type n,
                 optimized_const_reference x )
    {
        // @todo: see problems above
        if( size_ + n <= members_.capacity_ )
        {
            grow_back( n );
            iterator where = const_cast<T*>(before);
            // NOTE(review): same overlapping forward std::copy as in
            // the single-element insert above — confirm.
            std::copy( before, cend() - n, where + n );
            std::fill( where, where + n, x );
            BOOST_ASSERT( is_valid() );
            return;
        }

        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        std::uninitialized_fill_n( temp.end(), n, x );
        temp.size_ += n;
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

    // Insert the range [begin_arg, end_arg) before 'before',
    // dispatching on iterator category.
    template< class ForwardIterator >
    void insert( const_iterator before,
                 ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
    {
        typedef typename std::iterator_traits<ForwardIterator>
            ::iterator_category category;
        insert_impl( before, begin_arg, end_arg, category() );
    }

    void pop_back()
    {
        BOOST_ASSERT( !empty() );
        auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
        --size_;
    }

    void pop_back_n( size_type n )
    {
        BOOST_ASSERT( n <= size_ );
        if( n )
        {
            destroy_back_n( n );
            size_ -= n;
        }
    }

    void clear()
    {
        pop_back_n( size_ );
    }

    // Erase the element at 'where'; returns an iterator to the element
    // following it.
    iterator erase( const_iterator where )
    {
        BOOST_ASSERT( !empty() );
        BOOST_ASSERT( cbegin() <= where );
        BOOST_ASSERT( cend() > where );

        unsigned elements = cend() - where - 1u;

        if( elements > 0u )
        {
            const_iterator start = where + 1u;
            std::copy( start, start + elements,
                       const_cast<T*>(where) );
        }
        pop_back();
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( where );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    // Erase the range [from, to).
    iterator erase( const_iterator from, const_iterator to )
    {
        BOOST_ASSERT( !(std::distance(from,to)>0) ||
                      !empty() );
        BOOST_ASSERT( cbegin() <= from );
        BOOST_ASSERT( cend() >= to );

        unsigned elements = std::distance(to,cend());

        if( elements > 0u )
        {
            BOOST_ASSERT( elements > 0u );
            std::copy( to, to + elements,
                       const_cast<T*>(from) );
        }
        pop_back_n( std::distance(from,to) );
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( from );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    // Release surplus heap capacity if the grow policy agrees.
    void shrink_to_fit()
    {
        if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
            return;

        reserve_impl( size_ );
        members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
        BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
        BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
    }

    // Extend by n elements WITHOUT constructing them; returns a pointer
    // to the first raw slot. Caller must construct them.
    pointer uninitialized_grow( size_type n ) // strong
    {
        if( size_ + n > members_.capacity_ )
            reserve( size_ + n );

        pointer res = end();
        size_ += n;
        return res;
    }

    // Drop the last n elements WITHOUT destroying them.
    void uninitialized_shrink( size_type n ) // nothrow
    {
        // @remark: test for wrap-around
        BOOST_ASSERT( size_ - n <= members_.capacity_ );
        size_ -= n;
    }

    void uninitialized_resize( size_type n )
    {
        if( n > size() )
            uninitialized_grow( n - size() );
        else if( n < size() )
            uninitialized_shrink( size() - n );

        BOOST_ASSERT( size() == n );
    }

    // nothrow  - if both buffer are on the heap, or
    //          - if one buffer is on the heap and one has
    //            'has_allocated_buffer() == false', or
    //          - if copy-construction cannot throw
    // basic    - otherwise (better guarantee impossible)
    // requirement: the allocator must be no-throw-swappable
    void swap( auto_buffer& r )
    {
        bool on_stack     = is_on_stack();
        bool r_on_stack   = r.is_on_stack();
        bool both_on_heap = !on_stack && !r_on_stack;
        if( both_on_heap )
        {
            // Heap/heap: swap the handles.
            boost::swap( get_allocator(), r.get_allocator() );
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( buffer_, r.buffer_ );
            boost::swap( size_, r.size_ );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack || r_on_stack );
        bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                    (!on_stack && r_on_stack);

        //
        // Remark: we now know that we can copy into
        //         the unused stack buffer.
        //
        if( exactly_one_on_stack )
        {
            auto_buffer* one_on_stack = on_stack ? this : &r;
            auto_buffer* other        = on_stack ? &r : this;
            pointer new_buffer = static_cast<T*>(other->members_.address());
            copy_impl( one_on_stack->begin(), one_on_stack->end(),
                       new_buffer );                            // strong
            one_on_stack->auto_buffer_destroy();                // nothrow
            boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( size_, r.size_ );
            one_on_stack->buffer_ = other->buffer_;
            other->buffer_        = new_buffer;
            BOOST_ASSERT( other->is_on_stack() );
            BOOST_ASSERT( !one_on_stack->is_on_stack() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack && r_on_stack );
        swap_helper( *this, r, boost::has_trivial_assign<T>() );
        BOOST_ASSERT( is_valid() );
        BOOST_ASSERT( r.is_valid() );
    }

private:
    // Raw, suitably aligned storage for the inline stack buffer.
    typedef boost::aligned_storage< N * sizeof(T),
                                    boost::alignment_of<T>::value >
                               storage;

    struct members_type : storage /* to enable EBO */
    {
        size_type capacity_;

        members_type( size_type capacity )
            : capacity_(capacity)
        { }

        void* address() const
        { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
    };

    members_type members_;  // inline storage + current capacity
    pointer      buffer_;   // inline storage or heap memory; may be 0
    size_type    size_;     // number of constructed elements
};
  928. template< class T, class SBP, class GP, class A >
  929. inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
  930. {
  931. l.swap( r );
  932. }
  933. template< class T, class SBP, class GP, class A >
  934. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  935. const auto_buffer<T,SBP,GP,A>& r )
  936. {
  937. if( l.size() != r.size() )
  938. return false;
  939. return std::equal( l.begin(), l.end(), r.begin() );
  940. }
  941. template< class T, class SBP, class GP, class A >
  942. inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
  943. const auto_buffer<T,SBP,GP,A>& r )
  944. {
  945. return !(l == r);
  946. }
  947. template< class T, class SBP, class GP, class A >
  948. inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
  949. const auto_buffer<T,SBP,GP,A>& r )
  950. {
  951. return std::lexicographical_compare( l.begin(), l.end(),
  952. r.begin(), r.end() );
  953. }
  954. template< class T, class SBP, class GP, class A >
  955. inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
  956. const auto_buffer<T,SBP,GP,A>& r )
  957. {
  958. return (r < l);
  959. }
  960. template< class T, class SBP, class GP, class A >
  961. inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
  962. const auto_buffer<T,SBP,GP,A>& r )
  963. {
  964. return !(l > r);
  965. }
  966. template< class T, class SBP, class GP, class A >
  967. inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
  968. const auto_buffer<T,SBP,GP,A>& r )
  969. {
  970. return !(l < r);
  971. }
  972. } // namespace detail
  973. } // namespace signals2
  974. }
  975. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  976. #pragma warning(pop)
  977. #endif
  978. #endif