Lines Matching refs:__base
1191 typedef __deque_base<value_type, allocator_type> __base;
1193 typedef typename __base::__alloc_traits __alloc_traits;
1194 typedef typename __base::reference reference;
1195 typedef typename __base::const_reference const_reference;
1196 typedef typename __base::iterator iterator;
1197 typedef typename __base::const_iterator const_iterator;
1198 typedef typename __base::size_type size_type;
1199 typedef typename __base::difference_type difference_type;
1201 typedef typename __base::pointer pointer;
1202 typedef typename __base::const_pointer const_pointer;
1211 _LIBCPP_INLINE_VISIBILITY explicit deque(const allocator_type& __a) : __base(__a) {}
1238 deque(deque&& __c) _NOEXCEPT_(is_nothrow_move_constructible<__base>::value);
1263 iterator begin() _NOEXCEPT {return __base::begin();}
1265 const_iterator begin() const _NOEXCEPT {return __base::begin();}
1267 iterator end() _NOEXCEPT {return __base::end();}
1269 const_iterator end() const _NOEXCEPT {return __base::end();}
1273 {return reverse_iterator(__base::end());}
1276 {return const_reverse_iterator(__base::end());}
1279 {return reverse_iterator(__base::begin());}
1282 {return const_reverse_iterator(__base::begin());}
1286 {return __base::begin();}
1289 {return __base::end();}
1292 {return const_reverse_iterator(__base::end());}
1295 {return const_reverse_iterator(__base::begin());}
1299 size_type size() const _NOEXCEPT {return __base::size();}
1302 {return __alloc_traits::max_size(__base::__alloc());}
1307 bool empty() const _NOEXCEPT {return __base::size() == 0;}
1357 bool __invariants() const {return __base::__invariants();}
1359 typedef typename __base::__map_const_pointer __map_const_pointer;
1364 return __n / __base::__block_size + (__n % __base::__block_size != 0);
1369 return __base::__map_.size() == 0 ? 0 : __base::__map_.size() * __base::__block_size - 1;
1374 return __base::__start_;
1379 return __capacity() - (__base::__start_ + __base::size());
1413 if (__base::__alloc() != __c.__alloc())
1418 __base::__alloc() = __c.__alloc();
1419 __base::__map_.__alloc() = __c.__map_.__alloc();
1441 : __base(__a)
1457 : __base(__a)
1475 : __base(__a)
1482 : __base(__alloc_traits::select_on_container_copy_construction(__c.__alloc()))
1489 : __base(__a)
1504 : __base(__a)
1528 _NOEXCEPT_(is_nothrow_move_constructible<__base>::value)
1529 : __base(_VSTD::move(__c))
1536 : __base(_VSTD::move(__c), __a)
1561 if (__base::__alloc() != __c.__alloc())
1577 __base::__move_assign(__c);
1589 iterator __i = __base::begin();
1590 iterator __e = __base::end();
1605 if (static_cast<size_type>(__l - __f) > __base::size())
1607 _RAIter __m = __f + __base::size();
1608 _VSTD::copy(__f, __m, __base::begin());
1612 __erase_to_end(_VSTD::copy(__f, __l, __base::begin()));
1619 if (__n > __base::size())
1621 _VSTD::fill_n(__base::begin(), __base::size(), __v);
1622 __n -= __base::size();
1626 __erase_to_end(_VSTD::fill_n(__base::begin(), __n, __v));
1634 return __base::__alloc();
1641 if (__n > __base::size())
1642 __append(__n - __base::size());
1643 else if (__n < __base::size())
1644 __erase_to_end(__base::begin() + __n);
1651 if (__n > __base::size())
1652 __append(__n - __base::size(), __v);
1653 else if (__n < __base::size())
1654 __erase_to_end(__base::begin() + __n);
1661 allocator_type& __a = __base::__alloc();
1664 while (__base::__map_.size() > 0)
1666 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
1667 __base::__map_.pop_back();
1669 __base::__start_ = 0;
1673 if (__front_spare() >= __base::__block_size)
1675 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
1676 __base::__map_.pop_front();
1677 __base::__start_ -= __base::__block_size;
1679 if (__back_spare() >= __base::__block_size)
1681 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
1682 __base::__map_.pop_back();
1685 __base::__map_.shrink_to_fit();
1693 size_type __p = __base::__start_ + __i;
1694 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1702 size_type __p = __base::__start_ + __i;
1703 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1711 if (__i >= __base::size())
1712 __base::__throw_out_of_range();
1713 size_type __p = __base::__start_ + __i;
1714 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1722 if (__i >= __base::size())
1723 __base::__throw_out_of_range();
1724 size_type __p = __base::__start_ + __i;
1725 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1733 return *(*(__base::__map_.begin() + __base::__start_ / __base::__block_size)
1734 + __base::__start_ % __base::__block_size);
1742 return *(*(__base::__map_.begin() + __base::__start_ / __base::__block_size)
1743 + __base::__start_ % __base::__block_size);
1751 size_type __p = __base::size() + __base::__start_ - 1;
1752 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1760 size_type __p = __base::size() + __base::__start_ - 1;
1761 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1768 allocator_type& __a = __base::__alloc();
1772 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), __v);
1773 ++__base::size();
1782 allocator_type& __a = __base::__alloc();
1786 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::move(__v));
1787 ++__base::size();
1797 allocator_type& __a = __base::__alloc();
1801 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::forward<_Args>(__args)...);
1802 ++__base::size();
1812 allocator_type& __a = __base::__alloc();
1816 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), __v);
1817 --__base::__start_;
1818 ++__base::size();
1827 allocator_type& __a = __base::__alloc();
1831 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::move(__v));
1832 --__base::__start_;
1833 ++__base::size();
1843 allocator_type& __a = __base::__alloc();
1847 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::forward<_Args>(__args)...);
1848 --__base::__start_;
1849 ++__base::size();
1859 size_type __pos = __p - __base::begin();
1860 size_type __to_end = __base::size() - __pos;
1861 allocator_type& __a = __base::__alloc();
1869 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), __v);
1870 --__base::__start_;
1871 ++__base::size();
1876 iterator __b = __base::begin();
1881 --__base::__start_;
1882 ++__base::size();
1893 size_type __de = __base::size() - __pos;
1896 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), __v);
1897 ++__base::size();
1902 iterator __e = __base::end();
1907 ++__base::size();
1913 return __base::begin() + __pos;
1922 size_type __pos = __p - __base::begin();
1923 size_type __to_end = __base::size() - __pos;
1924 allocator_type& __a = __base::__alloc();
1932 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::move(__v));
1933 --__base::__start_;
1934 ++__base::size();
1938 iterator __b = __base::begin();
1941 --__base::__start_;
1942 ++__base::size();
1953 size_type __de = __base::size() - __pos;
1956 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::move(__v));
1957 ++__base::size();
1961 iterator __e = __base::end();
1964 ++__base::size();
1970 return __base::begin() + __pos;
1980 size_type __pos = __p - __base::begin();
1981 size_type __to_end = __base::size() - __pos;
1982 allocator_type& __a = __base::__alloc();
1990 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::forward<_Args>(__args)...);
1991 --__base::__start_;
1992 ++__base::size();
1997 iterator __b = __base::begin();
2000 --__base::__start_;
2001 ++__base::size();
2012 size_type __de = __base::size() - __pos;
2015 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::forward<_Args>(__args)...);
2016 ++__base::size();
2021 iterator __e = __base::end();
2024 ++__base::size();
2030 return __base::begin() + __pos;
2040 size_type __pos = __p - __base::begin();
2041 size_type __to_end = __base::size() - __pos;
2042 allocator_type& __a = __base::__alloc();
2049 iterator __old_begin = __base::begin();
2053 for (size_type __m = __n - __pos; __m; --__m, --__base::__start_, ++__base::size())
2074 iterator __old_end = __base::end();
2076 size_type __de = __base::size() - __pos;
2079 for (size_type __m = __n - __de; __m; --__m, ++__i, ++__base::size())
2093 return __base::begin() + __pos;
2103 __split_buffer<value_type, allocator_type&> __buf(__base::__alloc());
2116 size_type __pos = __p - __base::begin();
2117 size_type __to_end = __base::size() - __pos;
2118 allocator_type& __a = __base::__alloc();
2125 iterator __old_begin = __base::begin();
2131 for (_BiIter __j = __m; __j != __f; --__base::__start_, ++__base::size())
2141 --__base::__start_;
2142 ++__base::size();
2156 iterator __old_end = __base::end();
2159 size_type __de = __base::size() - __pos;
2163 for (_BiIter __j = __m; __j != __l; ++__i, ++__j, ++__base::size())
2170 for (iterator __j = __oen; __j != __old_end; ++__i, ++__j, ++__base::size())
2177 return __base::begin() + __pos;
2198 allocator_type& __a = __base::__alloc();
2203 for (iterator __i = __base::end(); __f != __l; ++__i, ++__f, ++__base::size())
2211 allocator_type& __a = __base::__alloc();
2216 for (iterator __i = __base::end(); __n; --__n, ++__i, ++__base::size())
2224 allocator_type& __a = __base::__alloc();
2229 for (iterator __i = __base::end(); __n; --__n, ++__i, ++__base::size())
2239 allocator_type& __a = __base::__alloc();
2240 if (__back_spare() >= __base::__block_size)
2242 __base::__start_ += __base::__block_size;
2243 pointer __pt = __base::__map_.back();
2244 __base::__map_.pop_back();
2245 __base::__map_.push_front(__pt);
2247 // Else if __base::__map_.size() < __base::__map_.capacity() then we need to allocate 1 buffer
2248 else if (__base::__map_.size() < __base::__map_.capacity())
2252 if (__base::__map_.__front_spare() > 0)
2253 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2256 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2258 pointer __pt = __base::__map_.back();
2259 __base::__map_.pop_back();
2260 __base::__map_.push_front(__pt);
2262 __base::__start_ = __base::__map_.size() == 1 ?
2263 __base::__block_size / 2 :
2264 __base::__start_ + __base::__block_size;
2269 __split_buffer<pointer, typename __base::__pointer_allocator&>
2270 __buf(max<size_type>(2 * __base::__map_.capacity(), 1),
2271 0, __base::__map_.__alloc());
2276 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2281 __alloc_traits::deallocate(__a, __buf.front(), __base::__block_size);
2285 for (typename __base::__map_pointer __i = __base::__map_.begin();
2286 __i != __base::__map_.end(); ++__i)
2288 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2289 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2290 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2291 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2292 __base::__start_ = __base::__map_.size() == 1 ?
2293 __base::__block_size / 2 :
2294 __base::__start_ + __base::__block_size;
2304 allocator_type& __a = __base::__alloc();
2305 size_type __nb = __recommend_blocks(__n + __base::__map_.empty());
2307 size_type __back_capacity = __back_spare() / __base::__block_size;
2313 __base::__start_ += __base::__block_size * __back_capacity;
2316 pointer __pt = __base::__map_.back();
2317 __base::__map_.pop_back();
2318 __base::__map_.push_front(__pt);
2322 else if (__nb <= __base::__map_.capacity() - __base::__map_.size())
2326 for (; __nb > 0; --__nb, __base::__start_ += __base::__block_size - (__base::__map_.size() == 1))
2328 if (__base::__map_.__front_spare() == 0)
2330 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2333 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2335 __base::__start_ += __back_capacity * __base::__block_size;
2338 pointer __pt = __base::__map_.back();
2339 __base::__map_.pop_back();
2340 __base::__map_.push_front(__pt);
2346 size_type __ds = (__nb + __back_capacity) * __base::__block_size - __base::__map_.empty();
2347 __split_buffer<pointer, typename __base::__pointer_allocator&>
2348 __buf(max<size_type>(2* __base::__map_.capacity(),
2349 __nb + __base::__map_.size()),
2350 0, __base::__map_.__alloc());
2356 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2361 for (typename __base::__map_pointer __i = __buf.begin();
2363 __alloc_traits::deallocate(__a, *__i, __base::__block_size);
2369 __buf.push_back(__base::__map_.back());
2370 __base::__map_.pop_back();
2372 for (typename __base::__map_pointer __i = __base::__map_.begin();
2373 __i != __base::__map_.end(); ++__i)
2375 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2376 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2377 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2378 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2379 __base::__start_ += __ds;
2389 allocator_type& __a = __base::__alloc();
2390 if (__front_spare() >= __base::__block_size)
2392 __base::__start_ -= __base::__block_size;
2393 pointer __pt = __base::__map_.front();
2394 __base::__map_.pop_front();
2395 __base::__map_.push_back(__pt);
2398 else if (__base::__map_.size() < __base::__map_.capacity())
2402 if (__base::__map_.__back_spare() != 0)
2403 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2406 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2408 pointer __pt = __base::__map_.front();
2409 __base::__map_.pop_front();
2410 __base::__map_.push_back(__pt);
2416 __split_buffer<pointer, typename __base::__pointer_allocator&>
2417 __buf(max<size_type>(2* __base::__map_.capacity(), 1),
2418 __base::__map_.size(),
2419 __base::__map_.__alloc());
2424 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2429 __alloc_traits::deallocate(__a, __buf.back(), __base::__block_size);
2433 for (typename __base::__map_pointer __i = __base::__map_.end();
2434 __i != __base::__map_.begin();)
2436 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2437 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2438 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2439 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2449 allocator_type& __a = __base::__alloc();
2450 size_type __nb = __recommend_blocks(__n + __base::__map_.empty());
2452 size_type __front_capacity = __front_spare() / __base::__block_size;
2458 __base::__start_ -= __base::__block_size * __front_capacity;
2461 pointer __pt = __base::__map_.front();
2462 __base::__map_.pop_front();
2463 __base::__map_.push_back(__pt);
2467 else if (__nb <= __base::__map_.capacity() - __base::__map_.size())
2473 if (__base::__map_.__back_spare() == 0)
2475 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2477 for (; __nb > 0; --__nb, ++__front_capacity, __base::__start_ +=
2478 __base::__block_size - (__base::__map_.size() == 1))
2479 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2481 __base::__start_ -= __base::__block_size * __front_capacity;
2484 pointer __pt = __base::__map_.front();
2485 __base::__map_.pop_front();
2486 __base::__map_.push_back(__pt);
2492 size_type __ds = __front_capacity * __base::__block_size;
2493 __split_buffer<pointer, typename __base::__pointer_allocator&>
2494 __buf(max<size_type>(2* __base::__map_.capacity(),
2495 __nb + __base::__map_.size()),
2496 __base::__map_.size() - __front_capacity,
2497 __base::__map_.__alloc());
2503 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2508 for (typename __base::__map_pointer __i = __buf.begin();
2510 __alloc_traits::deallocate(__a, *__i, __base::__block_size);
2516 __buf.push_back(__base::__map_.front());
2517 __base::__map_.pop_front();
2519 for (typename __base::__map_pointer __i = __base::__map_.end();
2520 __i != __base::__map_.begin();)
2522 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2523 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2524 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2525 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2526 __base::__start_ -= __ds;
2534 allocator_type& __a = __base::__alloc();
2535 __alloc_traits::destroy(__a, __to_raw_pointer(*(__base::__map_.begin() +
2536 __base::__start_ / __base::__block_size) +
2537 __base::__start_ % __base::__block_size));
2538 --__base::size();
2539 if (++__base::__start_ >= 2 * __base::__block_size)
2541 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2542 __base::__map_.pop_front();
2543 __base::__start_ -= __base::__block_size;
2551 allocator_type& __a = __base::__alloc();
2552 size_type __p = __base::size() + __base::__start_ - 1;
2553 __alloc_traits::destroy(__a, __to_raw_pointer(*(__base::__map_.begin() +
2554 __p / __base::__block_size) +
2555 __p % __base::__block_size));
2556 --__base::size();
2557 if (__back_spare() >= 2 * __base::__block_size)
2559 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2560 __base::__map_.pop_back();
2578 pointer __fe = *__f.__m_iter_ + __base::__block_size;
2632 allocator_type& __a = __base::__alloc();
2634 // for (; __f != __l; ++__r, ++__f, ++__base::size())
2640 pointer __fe = *__f.__m_iter_ + __base::__block_size;
2649 for (; __fb != __fe; ++__fb, ++__r, ++__base::size())
2663 allocator_type& __a = __base::__alloc();
2668 // --__base::__start_;
2669 // ++__base::size();
2688 --__base::__start_;
2689 ++__base::size();
2701 iterator __b = __base::begin();
2704 allocator_type& __a = __base::__alloc();
2705 if (__pos < (__base::size() - 1) / 2)
2709 --__base::size();
2710 ++__base::__start_;
2711 if (__front_spare() >= 2 * __base::__block_size)
2713 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2714 __base::__map_.pop_front();
2715 __base::__start_ -= __base::__block_size;
2720 iterator __i = _VSTD::move(_VSTD::next(__p), __base::end(), __p);
2722 --__base::size();
2723 if (__back_spare() >= 2 * __base::__block_size)
2725 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2726 __base::__map_.pop_back();
2729 return __base::begin() + __pos;
2737 iterator __b = __base::begin();
2742 allocator_type& __a = __base::__alloc();
2743 if (__pos < (__base::size() - __n) / 2)
2748 __base::size() -= __n;
2749 __base::__start_ += __n;
2750 while (__front_spare() >= 2 * __base::__block_size)
2752 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2753 __base::__map_.pop_front();
2754 __base::__start_ -= __base::__block_size;
2759 iterator __i = _VSTD::move(__p + __n, __base::end(), __p);
2760 for (iterator __e = __base::end(); __i != __e; ++__i)
2762 __base::size() -= __n;
2763 while (__back_spare() >= 2 * __base::__block_size)
2765 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2766 __base::__map_.pop_back();
2770 return __base::begin() + __pos;
2777 iterator __e = __base::end();
2781 allocator_type& __a = __base::__alloc();
2782 iterator __b = __base::begin();
2786 __base::size() -= __n;
2787 while (__back_spare() >= 2 * __base::__block_size)
2789 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2790 __base::__map_.pop_back();
2802 __base::swap(__c);
2810 __base::clear();
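
For context on the pattern that recurs throughout this listing (e.g. source lines 1693-1694, 1751-1752 and 2535-2537): deque keeps its elements in fixed-size blocks tracked by __base::__map_, and a logical index __i is resolved by adding __base::__start_ and splitting the sum into a block index and an in-block offset. Below is a minimal, self-contained sketch of that addressing arithmetic only; the names toy_deque_storage, blocks, start_ and kBlockSize are illustrative stand-ins for __base::__map_, __base::__start_ and __base::__block_size, and the sketch does not reflect libc++'s real class layout, allocator handling, or growth logic.

// Minimal sketch of the index-to-element mapping used by the lines above.
// All names here are hypothetical; only the arithmetic mirrors the source:
//   *(*(__map_.begin() + __p / __block_size) + __p % __block_size)
#include <cassert>
#include <cstddef>
#include <vector>

template <class T>
struct toy_deque_storage {
    static constexpr std::size_t kBlockSize = 8;  // libc++ derives its block size from sizeof(T)
    std::vector<T*> blocks;                       // the "map": one pointer per fixed-size block
    std::size_t start_ = 0;                       // offset of element 0 from the first map slot
    std::size_t size_  = 0;

    // Element i lives at block (start_ + i) / kBlockSize,
    // offset (start_ + i) % kBlockSize.
    T& operator[](std::size_t i) {
        std::size_t p = start_ + i;
        return blocks[p / kBlockSize][p % kBlockSize];
    }
};

int main() {
    toy_deque_storage<int> d;
    d.blocks.push_back(new int[toy_deque_storage<int>::kBlockSize]);
    d.blocks.push_back(new int[toy_deque_storage<int>::kBlockSize]);
    d.start_ = 6;                 // leave spare slots at the front, as push_front would need
    for (int i = 0; i < 5; ++i) { // five elements spanning the boundary between the two blocks
        d[static_cast<std::size_t>(i)] = i;
        ++d.size_;
    }
    assert(d.size_ == 5 && d[0] == 0 && d[4] == 4);
    for (int* b : d.blocks) delete[] b;
}

Keeping start_ away from slot 0 is what lets the listed push_front/emplace_front lines construct at *--__base::begin() and then decrement __base::__start_ (source lines 1816-1818); when the spare is exhausted, the capacity-shuffling code around source lines 2239-2294 moves or allocates whole blocks rather than individual elements.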