]> git.lyx.org Git - lyx.git/blob - 3rdparty/boost/boost/signals2/detail/auto_buffer.hpp
Update to boost 1.72
[lyx.git] / 3rdparty / boost / boost / signals2 / detail / auto_buffer.hpp
1 // Copyright Thorsten Ottosen, 2009.
2 // Distributed under the Boost Software License, Version 1.0. (See
3 // accompanying file LICENSE_1_0.txt or copy at
4 // http://www.boost.org/LICENSE_1_0.txt)
5
6 #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
7 #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
8
9 #include <boost/detail/workaround.hpp>
10
11 #if defined(_MSC_VER)
12 # pragma once
13 #endif
14
15 #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
16 #pragma warning(push)
17 #pragma warning(disable:4996)
18 #endif
19
20 #include <boost/assert.hpp>
21 #include <boost/iterator/reverse_iterator.hpp>
22 #include <boost/iterator/iterator_traits.hpp>
23 #include <boost/mpl/if.hpp>
24 #include <boost/signals2/detail/scope_guard.hpp>
25 #include <boost/swap.hpp>
26 #include <boost/type_traits/aligned_storage.hpp>
27 #include <boost/type_traits/alignment_of.hpp>
28 #include <boost/type_traits/has_nothrow_copy.hpp>
29 #include <boost/type_traits/has_nothrow_assign.hpp>
30 #include <boost/type_traits/has_trivial_assign.hpp>
31 #include <boost/type_traits/has_trivial_constructor.hpp>
32 #include <boost/type_traits/has_trivial_destructor.hpp>
33 #include <algorithm>
34 #include <cstring>
35 #include <iterator>
36 #include <memory>
37 #include <stdexcept>
38
39 namespace boost
40 {
41 namespace signals2
42 {
43 namespace detail
44 {
45     //
46     // Policies for creating the stack buffer.
47     //
    // Stack-buffer sizing policy: reserve room for exactly N objects of T.
    template< unsigned N >
    struct store_n_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };
53
    // Stack-buffer sizing policy: reserve N raw bytes; the object count
    // is derived later as N / sizeof(T) (see compute_buffer_objects).
    template< unsigned N >
    struct store_n_bytes
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };
59
60     namespace auto_buffer_detail
61     {
        // Byte size of the stack buffer for an object-count policy:
        // N objects * sizeof(T).
        template< class Policy, class T >
        struct compute_buffer_size
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
        };
67
        // Specialization for the byte-count policy: the byte size is N itself.
        template< unsigned N, class T >
        struct compute_buffer_size< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N );
        };
73
        // Number of T objects the stack buffer holds for an object-count
        // policy: the policy's N unchanged.
        template< class Policy, class T >
        struct compute_buffer_objects
        {
            BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
        };
79
        // Specialization for the byte-count policy: how many whole Ts fit
        // in N bytes (truncating integer division).
        template< unsigned N, class T >
        struct compute_buffer_objects< store_n_bytes<N>, T >
        {
            BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
        };
85     }
86
87     struct default_grow_policy
88     {
89         template< class SizeType >
90         static SizeType new_capacity( SizeType capacity )
91         {
92             //
93             // @remark: we grow the capacity quite agressively.
94             //          this is justified since we aim to minimize
95             //          heap-allocations, and because we mostly use
96             //          the buffer locally.
97             return capacity * 4u;
98         }
99
100         template< class SizeType >
101         static bool should_shrink( SizeType, SizeType )
102         {
103             //
104             // @remark: when defining a new grow policy, one might
105             //          choose that if the waated space is less
106             //          than a certain percentage, then it is of
107             //          little use to shrink.
108             //
109             return true;
110         }
111     };
112
    // Forward declaration with the defaults: a 256-object stack buffer,
    // 4x growth, std::allocator<T>.
    template< class T,
              class StackBufferPolicy = store_n_objects<256>,
              class GrowPolicy        = default_grow_policy,
              class Allocator         = std::allocator<T> >
    class auto_buffer;
118
119
120
    // A vector-like container that keeps up to N elements in an internal
    // stack buffer and only spills to the allocator beyond that.
    template
    <
        class T,
        class StackBufferPolicy,
        class GrowPolicy,
        class Allocator
    >
    class auto_buffer : Allocator // inherits privately (presumably for the empty-base optimization)
    {
    private:
        // Number of objects the inline (stack) storage can hold.
        enum { N = auto_buffer_detail::
                   compute_buffer_objects<StackBufferPolicy,T>::value };

        BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

        // Same buffer type but without any stack storage.
        typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
                                                         local_buffer;

    public:
        typedef Allocator                                allocator_type;
        typedef T                                        value_type;
        typedef typename Allocator::size_type            size_type;
        typedef typename Allocator::difference_type      difference_type;
        typedef T*                                       pointer;
        typedef typename Allocator::pointer              allocator_pointer;
        typedef const T*                                 const_pointer;
        typedef T&                                       reference;
        typedef const T&                                 const_reference;
        typedef pointer                                  iterator;
        typedef const_pointer                            const_iterator;
        typedef boost::reverse_iterator<iterator>        reverse_iterator;
        typedef boost::reverse_iterator<const_iterator>  const_reverse_iterator;
        // Pass small trivially-assignable Ts by value, everything else by
        // const reference.
        typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                           && sizeof(T) <= sizeof(long double),
                                          const value_type,
                                          const_reference >::type
                                                      optimized_const_reference;
158     private:
159
        // Return raw storage for 'capacity_arg' objects: the internal
        // stack buffer when it is large enough, otherwise fresh heap
        // memory from the allocator. No objects are constructed here.
        pointer allocate( size_type capacity_arg )
        {
            if( capacity_arg > N )
                return &*get_allocator().allocate( capacity_arg ); // &* unwraps a possibly non-raw allocator pointer to T*
            else
                return static_cast<T*>( members_.address() );
        }
167
        // Release storage obtained from allocate(). A capacity of N or
        // less means the stack buffer was in use, which must not be
        // freed. Elements must already have been destroyed.
        void deallocate( pointer where, size_type capacity_arg )
        {
            if( capacity_arg <= N )
                return;
            get_allocator().deallocate( allocator_pointer(where), capacity_arg );
        }
174
        // Copy-construct [begin,end) into the raw storage at 'where'.
        // Contiguous ranges of trivially-assignable T are bulk-copied
        // with memcpy; everything else uses std::uninitialized_copy.
        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
        {
            copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
        }

        // Raw T* source + trivially assignable T: raw byte copy.
        static void copy_rai( const T* begin, const T* end,
                              pointer where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        // Random-access iterators that are not raw pointers (or T without
        // trivial assignment): element-wise construction.
        template< class I, bool b >
        static void copy_rai( I begin, I end,
                              pointer where, const boost::integral_constant<bool, b>& )
        {
            std::uninitialized_copy( begin, end, where );
        }

        // Weaker iterator categories: element-wise construction.
        template< class I >
        static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
        {
            std::uninitialized_copy( begin, end, where );
        }

        // Entry point: dispatch on the iterator category.
        template< class I >
        static void copy_impl( I begin, I end, pointer where )
        {
            copy_impl( begin, end, where,
                       typename std::iterator_traits<I>::iterator_category() );
        }
206
        // Assign [begin,end) onto already-constructed elements at 'where'.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where )
        {
            assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
        }

        // Trivially assignable: raw byte copy.
        // NOTE(review): assumes 'begin' and 'where' are raw pointers into
        // contiguous storage, which is what the callers in this file pass.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
        {
            std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
        }

        // General case: element-wise copy assignment.
        template< class I, class I2 >
        static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
        {
            for( ; begin != end; ++begin, ++where )
                *where = *begin;
        }
225
        // Append n value-initialized elements; the caller has already
        // checked capacity. Trivial-assign T: fill raw storage with T()
        // in a single pass.
        void unchecked_push_back_n( size_type n, const boost::true_type& )
        {
            std::uninitialized_fill( end(), end() + n, T() );
            size_ += n;
        }

        // General T: default-construct one element at a time.
        void unchecked_push_back_n( size_type n, const boost::false_type& )
        {
            for( size_type i = 0u; i < n; ++i )
                unchecked_push_back();
        }
237
        // Destroy the single element at 'where'; a no-op when T has a
        // trivial destructor.
        void auto_buffer_destroy( pointer where, const boost::false_type& )
        {
            (*where).~T();
        }

        void auto_buffer_destroy( pointer, const boost::true_type& )
        { }

        void auto_buffer_destroy( pointer where )
        {
            auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
        }
250
        // Destroy all elements and release heap storage, if any. Safe on
        // a buffer whose storage was already stolen (buffer_ == 0), which
        // happens for N == 0 and the locals used by one_sided_swap().
        void auto_buffer_destroy()
        {
            BOOST_ASSERT( is_valid() );
            if( buffer_ ) // do we need this check? Yes, but only
                // for N = 0u + local instances in one_sided_swap()
                auto_buffer_destroy( boost::has_trivial_destructor<T>() );
        }
258
        // Destroy the last n elements (indices size_-n .. size_-1), back
        // to front. Does not adjust size_; the callers do that.
        void destroy_back_n( size_type n, const boost::false_type& )
        {
            BOOST_ASSERT( n > 0 );
            pointer buffer  = buffer_ + size_ - 1u;
            pointer new_end = buffer - n;
            for( ; buffer > new_end; --buffer )
                auto_buffer_destroy( buffer );
        }

        // Trivial destructor: nothing to do.
        void destroy_back_n( size_type, const boost::true_type& )
        { }

        void destroy_back_n( size_type n )
        {
            destroy_back_n( n, boost::has_trivial_destructor<T>() );
        }
275
        // Full teardown: destroy the live elements (skipped when T's
        // destructor is trivial) and hand the storage to deallocate().
        void auto_buffer_destroy( const boost::false_type& x )
        {
            if( size_ )
                destroy_back_n( size_, x );
            deallocate( buffer_, members_.capacity_ );
        }

        void auto_buffer_destroy( const boost::true_type& )
        {
            deallocate( buffer_, members_.capacity_ );
        }
287
        // Allocate 'new_capacity' storage and copy the current elements
        // into it. When T's copy may throw, a scope guard frees the new
        // storage if a copy throws, preserving the strong guarantee.
        pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            scope_guard guard = make_obj_guard( *this,
                                                &auto_buffer::deallocate,
                                                new_buffer,
                                                new_capacity );
            copy_impl( begin(), end(), new_buffer ); // strong
            guard.dismiss();                         // nothrow
            return new_buffer;
        }

        // Nothrow-copyable T: no guard needed.
        pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
        {
            pointer new_buffer = allocate( new_capacity ); // strong
            copy_impl( begin(), end(), new_buffer );       // nothrow
            return new_buffer;
        }
306
        // Grow to exactly 'new_capacity': copy the elements into new
        // storage, destroy the old contents, then adopt the new buffer.
        void reserve_impl( size_type new_capacity )
        {
            pointer new_buffer = move_to_new_buffer( new_capacity,
                                                 boost::has_nothrow_copy<T>() );
            auto_buffer_destroy();
            buffer_   = new_buffer;
            members_.capacity_ = new_capacity;
            BOOST_ASSERT( size_ <= members_.capacity_ );
        }
316
        // Next capacity when at least n slots are needed: the grow
        // policy's suggestion, but never less than n.
        size_type new_capacity_impl( size_type n )
        {
            BOOST_ASSERT( n > members_.capacity_ );
            size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
            // @todo: consider to check for allocator.max_size()
            return (std::max)(new_capacity,n);
        }
324
        // Swap two stack-resident buffers when T has nothrow assignment:
        // stage l's elements in a temporary, then assign across and swap
        // the bookkeeping.
        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::true_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

            auto_buffer temp( l.begin(), l.end() );
            assign_impl( r.begin(), r.end(), l.begin() );
            assign_impl( temp.begin(), temp.end(), r.begin() );
            boost::swap( l.size_, r.size_ );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }
336
        // Swap two stack-resident buffers element-wise: swap the common
        // prefix, copy the surplus of the larger into the smaller, then
        // shrink the larger.
        static void swap_helper( auto_buffer& l, auto_buffer& r,
                                 const boost::false_type& )
        {
            BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
            size_type min_size    = (std::min)(l.size_,r.size_);
            size_type max_size    = (std::max)(l.size_,r.size_);
            size_type diff        = max_size - min_size;
            auto_buffer* smallest = l.size_ == min_size ? &l : &r;
            auto_buffer* largest  = smallest == &l ? &r : &l;

            // @remark: the implementation below is not as fast
            //          as it could be if we assumed T had a default
            //          constructor.

            size_type i = 0u;
            for(  ; i < min_size; ++i )
                boost::swap( (*smallest)[i], (*largest)[i] );

            for( ; i < max_size; ++i )
                smallest->unchecked_push_back( (*largest)[i] );

            largest->pop_back_n( diff );
            boost::swap( l.members_.capacity_, r.members_.capacity_ );
        }
361
        // Steal temp's heap buffer: destroy our own contents and take
        // over temp's allocator, storage and size. temp is left with a
        // null buffer_ so its destructor becomes a no-op.
        void one_sided_swap( auto_buffer& temp ) // nothrow
        {
            BOOST_ASSERT( !temp.is_on_stack() );
            auto_buffer_destroy();
            // @remark: must be nothrow
            get_allocator()    = temp.get_allocator();
            members_.capacity_ = temp.members_.capacity_;
            buffer_            = temp.buffer_;
            BOOST_ASSERT( temp.size_ >= size_ + 1u );
            size_              = temp.size_;
            temp.buffer_       = 0;
            BOOST_ASSERT( temp.is_valid() );
        }
375
        // Insert a single-pass range one element at a time; each insert
        // call returns a fresh iterator because the buffer may have been
        // reallocated.
        template< class I >
        void insert_impl( const_iterator before, I begin_arg, I end_arg,
                          std::input_iterator_tag )
        {
            for( ; begin_arg != end_arg; ++begin_arg )
            {
                before = insert( before, *begin_arg );
                ++before;
            }
        }
386
        // Extend the buffer by n elements within existing capacity.
        // Trivially constructible T: just bump size_ (storage stays raw).
        void grow_back( size_type n, const boost::true_type& )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            size_ += n;
        }

        // Otherwise construct the new elements via unchecked_push_back_n.
        void grow_back( size_type n, const boost::false_type& )
        {
            unchecked_push_back_n(n);
        }

        void grow_back( size_type n )
        {
            grow_back( n, boost::has_trivial_constructor<T>() );
        }
402
        // Extend the buffer by one element within existing capacity.
        // Trivially constructible T: just bump size_ (storage stays raw).
        void grow_back_one( const boost::true_type& )
        {
            BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
            size_ += 1;
        }

        // Otherwise default-construct the new element.
        void grow_back_one( const boost::false_type& )
        {
            unchecked_push_back();
        }

        void grow_back_one()
        {
            grow_back_one( boost::has_trivial_constructor<T>() );
        }
418
419         template< class I >
420         void insert_impl( const_iterator before, I begin_arg, I end_arg,
421                           std::forward_iterator_tag )
422         {
423             difference_type n = std::distance(begin_arg, end_arg);
424
425             if( size_ + n <= members_.capacity_ )
426             {
427                 bool is_back_insertion = before == cend();
428                 if( !is_back_insertion )
429                 {
430                     grow_back( n );
431                     iterator where = const_cast<T*>(before);
432                     std::copy( before, cend() - n, where + n );
433                     assign_impl( begin_arg, end_arg, where );
434                 }
435                 else
436                 {
437                     unchecked_push_back( begin_arg, end_arg );
438                 }
439                 BOOST_ASSERT( is_valid() );
440                 return;
441             }
442
443             auto_buffer temp( new_capacity_impl( size_ + n ) );
444             temp.unchecked_push_back( cbegin(), before );
445             temp.unchecked_push_back( begin_arg, end_arg );
446             temp.unchecked_push_back( before, cend() );
447             one_sided_swap( temp );
448             BOOST_ASSERT( is_valid() );
449         }
450
451     public:
        // Class invariant, checked by assertions throughout.
        bool is_valid() const // invariant
        {
            // @remark: allowed for N==0 and when
            //          using a local instance
            //          in insert()/one_sided_swap()
            if( buffer_ == 0 )
                return true;

            // capacity never drops below the stack buffer size
            if( members_.capacity_ < N )
                return false;

            // (vacuous given is_on_stack()'s definition; kept as written)
            if( !is_on_stack() && members_.capacity_ <= N )
                return false;

            // stack storage implies capacity of exactly N
            if( buffer_ == members_.address() )
                if( members_.capacity_ > N )
                    return false;

            if( size_ > members_.capacity_ )
                return false;

            return true;
        }
475
        // Default: empty, using the stack buffer with capacity N.
        auto_buffer()
            : members_( N ),
              buffer_( static_cast<T*>(members_.address()) ),
              size_( 0u )
        {
            BOOST_ASSERT( is_valid() );
        }
483
        // Copy: allocate enough for r's elements (but at least N) and
        // copy-construct them; size_ is set only after the copy succeeds
        // so a throwing copy leaves nothing extra to destroy.
        auto_buffer( const auto_buffer& r )
            : members_( (std::max)(r.size_,size_type(N)) ),
              buffer_( allocate( members_.capacity_ ) ),
              size_( 0 )
        {
            copy_impl( r.begin(), r.end(), buffer_ );
            size_ = r.size_;
            BOOST_ASSERT( is_valid() );
        }
493
        // Copy assignment; basic guarantee only — on a throw during
        // reallocation the buffer may be left empty.
        auto_buffer& operator=( const auto_buffer& r ) // basic
        {
            if( this == &r )
                return *this;

            difference_type diff = size_ - r.size_;
            if( diff >= 0 )
            {
                // shrinking: drop our surplus, then assign over the rest
                pop_back_n( static_cast<size_type>(diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                if( members_.capacity_ >= r.size() )
                {
                    // growing but still within capacity: extend, then
                    // assign over everything
                    unchecked_push_back_n( static_cast<size_type>(-diff) );
                    assign_impl( r.begin(), r.end(), begin() );
                }
                else
                {
                    // @remark: we release memory as early as possible
                    //          since we only give the basic guarantee
                    auto_buffer_destroy();
                    buffer_ = 0;
                    pointer new_buffer = allocate( r.size() );
                    scope_guard guard = make_obj_guard( *this,
                                                        &auto_buffer::deallocate,
                                                        new_buffer,
                                                        r.size() );
                    copy_impl( r.begin(), r.end(), new_buffer );
                    guard.dismiss();
                    buffer_            = new_buffer;
                    members_.capacity_ = r.size();
                    size_              = members_.capacity_;
                }
            }

            BOOST_ASSERT( size() == r.size() );
            BOOST_ASSERT( is_valid() );
            return *this;
        }
535
        // Reserve 'capacity_arg' slots (at least N); starts empty.
        explicit auto_buffer( size_type capacity_arg )
            : members_( (std::max)(capacity_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        // 'size_arg' copies of init_value.
        auto_buffer( size_type size_arg, optimized_const_reference init_value )
            : members_( (std::max)(size_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg; // only after the fill succeeded
            BOOST_ASSERT( is_valid() );
        }

        // As the capacity constructor, with an explicit allocator.
        auto_buffer( size_type capacity_arg, const allocator_type& a )
            : allocator_type( a ),
              members_( (std::max)(capacity_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            BOOST_ASSERT( is_valid() );
        }

        // As the fill constructor, with an explicit allocator.
        auto_buffer( size_type size_arg, optimized_const_reference init_value,
                     const allocator_type& a )
            : allocator_type( a ),
              members_( (std::max)(size_arg, size_type(N)) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
            size_ = size_arg;
            BOOST_ASSERT( is_valid() );
        }
574
        // Build from a range; capacity starts as the range length and is
        // bumped to at least N afterwards so the invariant holds.
        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
            :
              members_( std::distance(begin_arg, end_arg) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }

        // As above, with an explicit allocator.
        template< class ForwardIterator >
        auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                     const allocator_type& a )
            : allocator_type( a ),
              members_( std::distance(begin_arg, end_arg) ),
              buffer_( allocate(members_.capacity_) ),
              size_( 0 )
        {
            copy_impl( begin_arg, end_arg, buffer_ );
            size_ = members_.capacity_;
            if( members_.capacity_ < N )
                members_.capacity_ = N;
            BOOST_ASSERT( is_valid() );
        }
603
        // Destroy the elements and free any heap storage.
        ~auto_buffer()
        {
            auto_buffer_destroy();
        }
608
609     public:
610         bool empty() const
611         {
612             return size_ == 0;
613         }
614
615         bool full() const
616         {
617             return size_ == members_.capacity_;
618         }
619
620         bool is_on_stack() const
621         {
622             return members_.capacity_ <= N;
623         }
624
625         size_type size() const
626         {
627             return size_;
628         }
629
630         size_type capacity() const
631         {
632             return members_.capacity_;
633         }
634
635     public:
636         pointer data()
637         {
638             return buffer_;
639         }
640
641         const_pointer data() const
642         {
643             return buffer_;
644         }
645
646         allocator_type& get_allocator()
647         {
648             return static_cast<allocator_type&>(*this);
649         }
650
651         const allocator_type& get_allocator() const
652         {
653             return static_cast<const allocator_type&>(*this);
654         }
655
656     public:
657         iterator begin()
658         {
659             return buffer_;
660         }
661
662         const_iterator begin() const
663         {
664             return buffer_;
665         }
666
667         iterator end()
668         {
669             return buffer_ + size_;
670         }
671
672         const_iterator end() const
673         {
674             return buffer_ + size_;
675         }
676
677         reverse_iterator rbegin()
678         {
679             return reverse_iterator(end());
680         }
681
682         const_reverse_iterator rbegin() const
683         {
684             return const_reverse_iterator(end());
685         }
686
687         reverse_iterator rend()
688         {
689             return reverse_iterator(begin());
690         }
691
692         const_reverse_iterator rend() const
693         {
694             return const_reverse_iterator(begin());
695         }
696
697         const_iterator cbegin() const
698         {
699             return const_cast<const auto_buffer*>(this)->begin();
700         }
701
702         const_iterator cend() const
703         {
704             return const_cast<const auto_buffer*>(this)->end();
705         }
706
707         const_reverse_iterator crbegin() const
708         {
709             return const_cast<const auto_buffer*>(this)->rbegin();
710         }
711
712         const_reverse_iterator crend() const
713         {
714             return const_cast<const auto_buffer*>(this)->rend();
715         }
716
717     public:
718         reference front()
719         {
720             return buffer_[0];
721         }
722
723         optimized_const_reference front() const
724         {
725             return buffer_[0];
726         }
727
728         reference back()
729         {
730             return buffer_[size_-1];
731         }
732
733         optimized_const_reference back() const
734         {
735             return buffer_[size_-1];
736         }
737
738         reference operator[]( size_type n )
739         {
740             BOOST_ASSERT( n < size_ );
741             return buffer_[n];
742         }
743
744         optimized_const_reference operator[]( size_type n ) const
745         {
746             BOOST_ASSERT( n < size_ );
747             return buffer_[n];
748         }
749
        // Default-construct one element at the end; the caller guarantees
        // there is room.
        void unchecked_push_back()
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T;
            ++size_;
        }

        // Append n new elements; the caller guarantees capacity.
        void unchecked_push_back_n( size_type n )
        {
            BOOST_ASSERT( size_ + n <= members_.capacity_ );
            unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
        }

        // Copy-construct x at the end; the caller guarantees capacity.
        void unchecked_push_back( optimized_const_reference x ) // non-growing
        {
            BOOST_ASSERT( !full() );
            new (buffer_ + size_) T( x );
            ++size_;
        }

        // Append a whole range; the caller guarantees capacity.
        template< class ForwardIterator >
        void unchecked_push_back( ForwardIterator begin_arg,
                                  ForwardIterator end_arg ) // non-growing
        {
            BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
            copy_impl( begin_arg, end_arg, buffer_ + size_ );
            size_ += std::distance(begin_arg, end_arg);
        }
778
        // Ensure a capacity of exactly n (no growth policy applied).
        void reserve_precisely( size_type n )
        {
            BOOST_ASSERT( members_.capacity_  >= N );

            if( n <= members_.capacity_ )
                return;
            reserve_impl( n );
            BOOST_ASSERT( members_.capacity_ == n );
        }

        // Ensure a capacity of at least n, growing via GrowPolicy.
        void reserve( size_type n ) // strong
        {
            BOOST_ASSERT( members_.capacity_  >= N );

            if( n <= members_.capacity_ )
                return;

            reserve_impl( new_capacity_impl( n ) );
            BOOST_ASSERT( members_.capacity_ >= n );
        }
799
800         void push_back()
801         {
802             if( size_ != members_.capacity_ )
803             {
804                 unchecked_push_back();
805             }
806             else
807             {
808                 reserve( size_ + 1u );
809                 unchecked_push_back();
810             }
811         }
812
813         void push_back( optimized_const_reference x )
814         {
815             if( size_ != members_.capacity_ )
816             {
817                 unchecked_push_back( x );
818             }
819             else
820             {
821                reserve( size_ + 1u );
822                unchecked_push_back( x );
823             }
824         }
825
        // Append a whole range, growing at most once.
        template< class ForwardIterator >
        void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
        {
            difference_type diff = std::distance(begin_arg, end_arg);
            if( size_ + diff > members_.capacity_ )
                reserve( size_ + diff );
            unchecked_push_back( begin_arg, end_arg );
        }
834
835         iterator insert( const_iterator before, optimized_const_reference x ) // basic
836         {
837             // @todo: consider if we want to support x in 'this'
838             if( size_ < members_.capacity_ )
839             {
840                 bool is_back_insertion = before == cend();
841                 iterator where = const_cast<T*>(before);
842
843                 if( !is_back_insertion )
844                 {
845                     grow_back_one();
846                     std::copy( before, cend() - 1u, where + 1u );
847                     *where = x;
848                     BOOST_ASSERT( is_valid() );
849                  }
850                 else
851                 {
852                     unchecked_push_back( x );
853                 }
854                 return where;
855             }
856
857             auto_buffer temp( new_capacity_impl( size_ + 1u ) );
858             temp.unchecked_push_back( cbegin(), before );
859             iterator result = temp.end();
860             temp.unchecked_push_back( x );
861             temp.unchecked_push_back( before, cend() );
862             one_sided_swap( temp );
863             BOOST_ASSERT( is_valid() );
864             return result;
865         }
866
867         void insert( const_iterator before, size_type n,
868                      optimized_const_reference x )
869         {
870             // @todo: see problems above
871             if( size_ + n <= members_.capacity_ )
872             {
873                 grow_back( n );
874                 iterator where = const_cast<T*>(before);
875                 std::copy( before, cend() - n, where + n );
876                 std::fill( where, where + n, x );
877                 BOOST_ASSERT( is_valid() );
878                 return;
879             }
880
881             auto_buffer temp( new_capacity_impl( size_ + n ) );
882             temp.unchecked_push_back( cbegin(), before );
883             std::uninitialized_fill_n( temp.end(), n, x );
884             temp.size_ += n;
885             temp.unchecked_push_back( before, cend() );
886             one_sided_swap( temp );
887             BOOST_ASSERT( is_valid() );
888         }
889
890         template< class ForwardIterator >
891         void insert( const_iterator before,
892                      ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
893         {
894             typedef typename std::iterator_traits<ForwardIterator>
895                 ::iterator_category category;
896             insert_impl( before, begin_arg, end_arg, category() );
897         }
898
899         void pop_back()
900         {
901             BOOST_ASSERT( !empty() );
902             auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
903             --size_;
904         }
905
906         void pop_back_n( size_type n )
907         {
908             BOOST_ASSERT( n <= size_ );
909             if( n )
910             {
911                 destroy_back_n( n );
912                 size_ -= n;
913             }
914         }
915
916         void clear()
917         {
918             pop_back_n( size_ );
919         }
920
921         iterator erase( const_iterator where )
922         {
923             BOOST_ASSERT( !empty() );
924             BOOST_ASSERT( cbegin() <= where );
925             BOOST_ASSERT( cend() > where );
926
927             unsigned elements = cend() - where - 1u;
928
929             if( elements > 0u )
930             {
931                 const_iterator start = where + 1u;
932                 std::copy( start, start + elements,
933                            const_cast<T*>(where) );
934             }
935             pop_back();
936             BOOST_ASSERT( !full() );
937             iterator result = const_cast<T*>( where );
938             BOOST_ASSERT( result <= end() );
939             return result;
940         }
941
942         iterator erase( const_iterator from, const_iterator to )
943         {
944             BOOST_ASSERT( !(std::distance(from,to)>0) ||
945                           !empty() );
946             BOOST_ASSERT( cbegin() <= from );
947             BOOST_ASSERT( cend() >= to );
948
949             unsigned elements = std::distance(to,cend());
950
951             if( elements > 0u )
952             {
953                 BOOST_ASSERT( elements > 0u );
954                 std::copy( to, to + elements,
955                            const_cast<T*>(from) );
956             }
957             pop_back_n( std::distance(from,to) );
958             BOOST_ASSERT( !full() );
959             iterator result = const_cast<T*>( from );
960             BOOST_ASSERT( result <= end() );
961             return result;
962         }
963
964         void shrink_to_fit()
965         {
966             if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
967                 return;
968
969             reserve_impl( size_ );
970             members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
971             BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
972             BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
973         }
974
975         pointer uninitialized_grow( size_type n ) // strong
976         {
977             if( size_ + n > members_.capacity_ )
978                 reserve( size_ + n );
979
980             pointer res = end();
981             size_ += n;
982             return res;
983         }
984
        // Give back n slots without running destructors; the caller is
        // responsible for the elements in [size_ - n, size_) having been
        // destroyed (or never constructed).
        void uninitialized_shrink( size_type n ) // nothrow
        {
            // @remark: test for wrap-around
            // size_type is unsigned, so if n > size_ the subtraction
            // wraps to a huge value and the assertion fires.
            BOOST_ASSERT( size_ - n <= members_.capacity_ );
            size_ -= n;
        }
991
992         void uninitialized_resize( size_type n )
993         {
994             if( n > size() )
995                 uninitialized_grow( n - size() );
996             else if( n < size() )
997                 uninitialized_shrink( size() - n );
998
999            BOOST_ASSERT( size() == n );
1000         }
1001
        // Exchange contents with r. Exception guarantee:
        // nothrow  - if both buffer are on the heap, or
        //          - if one buffer is on the heap and one has
        //            'has_allocated_buffer() == false', or
        //          - if copy-construction cannot throw
        // basic    - otherwise (better guarantee impossible)
        // requirement: the allocator must be no-throw-swappable
        void swap( auto_buffer& r )
        {
            bool on_stack      = is_on_stack();
            bool r_on_stack    = r.is_on_stack();
            bool both_on_heap  = !on_stack && !r_on_stack;
            if( both_on_heap )
            {
                // Case 1: both heap-allocated — plain member-wise swap,
                // no element is copied or moved.
                boost::swap( get_allocator(), r.get_allocator() );
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( buffer_, r.buffer_ );
                boost::swap( size_, r.size_ );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack || r_on_stack );
            bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                        (!on_stack && r_on_stack);

            //
            // Remark: we now know that we can copy into
            //         the unused stack buffer.
            //
            if( exactly_one_on_stack )
            {
                // Case 2: copy the stack-resident elements into the heap
                // object's unused embedded storage first (the only step
                // that can throw), destroy the originals, then exchange
                // the bookkeeping and hand the heap pointer over.
                auto_buffer* one_on_stack = on_stack ? this : &r;
                auto_buffer* other        = on_stack ? &r : this;
                pointer new_buffer = static_cast<T*>(other->members_.address());
                copy_impl( one_on_stack->begin(), one_on_stack->end(),
                           new_buffer );                            // strong
                one_on_stack->auto_buffer_destroy();                       // nothrow
                boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
                boost::swap( members_.capacity_, r.members_.capacity_ );
                boost::swap( size_, r.size_ );
                one_on_stack->buffer_ = other->buffer_;
                other->buffer_        = new_buffer;
                BOOST_ASSERT( other->is_on_stack() );
                BOOST_ASSERT( !one_on_stack->is_on_stack() );
                BOOST_ASSERT( is_valid() );
                BOOST_ASSERT( r.is_valid() );
                return;
            }

            BOOST_ASSERT( on_stack && r_on_stack );
            // Case 3: both in stack storage — element-wise swap,
            // dispatched on trivial assignability (presumably a bitwise
            // fast path in swap_helper — confirm against its definition).
            swap_helper( *this, r, boost::has_trivial_assign<T>() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
        }
1057
    private:
        // Raw, suitably aligned storage for up to N stack-resident
        // elements of T.
        typedef boost::aligned_storage< N * sizeof(T),
                                        boost::alignment_of<T>::value >
                               storage;

        struct members_type : storage /* to enable EBO */
        {
            size_type capacity_;   // current capacity, in elements

            members_type( size_type capacity )
               : capacity_(capacity)
            { }

            // Address of the embedded stack buffer. The const_cast lets
            // a const members_type hand out a mutable pointer, since
            // storage::address() is invoked on a non-const storage&.
            void* address() const
            { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
        };

        members_type members_;   // embedded storage + capacity bookkeeping
        pointer      buffer_;    // element array: members_.address() or heap
        size_type    size_;      // number of constructed elements
1078
1079     };
1080
    // ADL-enabled non-member swap; forwards to the member swap (see its
    // exception-guarantee notes).
    template< class T, class SBP, class GP, class A >
    inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
    {
        l.swap( r );
    }
1086
1087     template< class T, class SBP, class GP, class A >
1088     inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
1089                             const auto_buffer<T,SBP,GP,A>& r )
1090     {
1091         if( l.size() != r.size() )
1092             return false;
1093         return std::equal( l.begin(), l.end(), r.begin() );
1094     }
1095
1096     template< class T, class SBP, class GP, class A >
1097     inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
1098                             const auto_buffer<T,SBP,GP,A>& r )
1099     {
1100         return !(l == r);
1101     }
1102
1103     template< class T, class SBP, class GP, class A >
1104     inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
1105                            const auto_buffer<T,SBP,GP,A>& r )
1106     {
1107         return std::lexicographical_compare( l.begin(), l.end(),
1108                                              r.begin(), r.end() );
1109     }
1110
1111     template< class T, class SBP, class GP, class A >
1112     inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
1113                            const auto_buffer<T,SBP,GP,A>& r )
1114     {
1115         return (r < l);
1116     }
1117
1118     template< class T, class SBP, class GP, class A >
1119     inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
1120                             const auto_buffer<T,SBP,GP,A>& r )
1121     {
1122         return !(l > r);
1123     }
1124
1125     template< class T, class SBP, class GP, class A >
1126     inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
1127                             const auto_buffer<T,SBP,GP,A>& r )
1128     {
1129         return !(l < r);
1130     }
1131
1132 } // namespace detail
1133 } // namespace signals2
1134 }
1135
1136 #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
1137 #pragma warning(pop)
1138 #endif
1139
1140 #endif