Lines Matching refs:__base

1179 typedef __deque_base<value_type, allocator_type> __base;
1181 typedef typename __base::__alloc_traits __alloc_traits;
1182 typedef typename __base::reference reference;
1183 typedef typename __base::const_reference const_reference;
1184 typedef typename __base::iterator iterator;
1185 typedef typename __base::const_iterator const_iterator;
1186 typedef typename __base::size_type size_type;
1187 typedef typename __base::difference_type difference_type;
1189 typedef typename __base::pointer pointer;
1190 typedef typename __base::const_pointer const_pointer;
1199 _LIBCPP_INLINE_VISIBILITY deque(const allocator_type& __a) : __base(__a) {}
1223 deque(deque&& __c) _NOEXCEPT_(is_nothrow_move_constructible<__base>::value);
1248 iterator begin() _NOEXCEPT {return __base::begin();}
1250 const_iterator begin() const _NOEXCEPT {return __base::begin();}
1252 iterator end() _NOEXCEPT {return __base::end();}
1254 const_iterator end() const _NOEXCEPT {return __base::end();}
1258 {return reverse_iterator(__base::end());}
1261 {return const_reverse_iterator(__base::end());}
1264 {return reverse_iterator(__base::begin());}
1267 {return const_reverse_iterator(__base::begin());}
1271 {return __base::begin();}
1274 {return __base::end();}
1277 {return const_reverse_iterator(__base::end());}
1280 {return const_reverse_iterator(__base::begin());}
1284 size_type size() const _NOEXCEPT {return __base::size();}
1287 {return __alloc_traits::max_size(__base::__alloc());}
1292 bool empty() const _NOEXCEPT {return __base::size() == 0;}
1342 bool __invariants() const {return __base::__invariants();}
1347 return __n / __base::__block_size + (__n % __base::__block_size != 0);
1352 return __base::__map_.size() == 0 ? 0 : __base::__map_.size() * __base::__block_size - 1;
1357 return __base::__start_;
1362 return __capacity() - (__base::__start_ + __base::size());
1396 if (__base::__alloc() != __c.__alloc())
1401 __base::__alloc() = __c.__alloc();
1402 __base::__map_.__alloc() = __c.__map_.__alloc();
1430 : __base(__a)
1448 : __base(__a)
1455 : __base(__alloc_traits::select_on_container_copy_construction(__c.__alloc()))
1462 : __base(__a)
1477 : __base(__a)
1501 _NOEXCEPT_(is_nothrow_move_constructible<__base>::value)
1502 : __base(_VSTD::move(__c))
1509 : __base(_VSTD::move(__c), __a)
1534 if (__base::__alloc() != __c.__alloc())
1550 __base::__move_assign(__c);
1562 iterator __i = __base::begin();
1563 iterator __e = __base::end();
1578 if (static_cast<size_type>(__l - __f) > __base::size())
1580 _RAIter __m = __f + __base::size();
1581 _VSTD::copy(__f, __m, __base::begin());
1585 __erase_to_end(_VSTD::copy(__f, __l, __base::begin()));
1592 if (__n > __base::size())
1594 _VSTD::fill_n(__base::begin(), __base::size(), __v);
1595 __n -= __base::size();
1599 __erase_to_end(_VSTD::fill_n(__base::begin(), __n, __v));
1607 return __base::__alloc();
1614 if (__n > __base::size())
1615 __append(__n - __base::size());
1616 else if (__n < __base::size())
1617 __erase_to_end(__base::begin() + __n);
1624 if (__n > __base::size())
1625 __append(__n - __base::size(), __v);
1626 else if (__n < __base::size())
1627 __erase_to_end(__base::begin() + __n);
1634 allocator_type& __a = __base::__alloc();
1637 while (__base::__map_.size() > 0)
1639 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
1640 __base::__map_.pop_back();
1642 __base::__start_ = 0;
1646 if (__front_spare() >= __base::__block_size)
1648 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
1649 __base::__map_.pop_front();
1650 __base::__start_ -= __base::__block_size;
1652 if (__back_spare() >= __base::__block_size)
1654 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
1655 __base::__map_.pop_back();
1658 __base::__map_.shrink_to_fit();
1666 size_type __p = __base::__start_ + __i;
1667 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1675 size_type __p = __base::__start_ + __i;
1676 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1684 if (__i >= __base::size())
1685 __base::__throw_out_of_range();
1686 size_type __p = __base::__start_ + __i;
1687 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1695 if (__i >= __base::size())
1696 __base::__throw_out_of_range();
1697 size_type __p = __base::__start_ + __i;
1698 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1706 return *(*(__base::__map_.begin() + __base::__start_ / __base::__block_size)
1707 + __base::__start_ % __base::__block_size);
1715 return *(*(__base::__map_.begin() + __base::__start_ / __base::__block_size)
1716 + __base::__start_ % __base::__block_size);
1724 size_type __p = __base::size() + __base::__start_ - 1;
1725 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1733 size_type __p = __base::size() + __base::__start_ - 1;
1734 return *(*(__base::__map_.begin() + __p / __base::__block_size) + __p % __base::__block_size);
1741 allocator_type& __a = __base::__alloc();
1745 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), __v);
1746 ++__base::size();
1755 allocator_type& __a = __base::__alloc();
1759 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::move(__v));
1760 ++__base::size();
1770 allocator_type& __a = __base::__alloc();
1774 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::forward<_Args>(__args)...);
1775 ++__base::size();
1785 allocator_type& __a = __base::__alloc();
1789 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), __v);
1790 --__base::__start_;
1791 ++__base::size();
1800 allocator_type& __a = __base::__alloc();
1804 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::move(__v));
1805 --__base::__start_;
1806 ++__base::size();
1816 allocator_type& __a = __base::__alloc();
1820 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::forward<_Args>(__args)...);
1821 --__base::__start_;
1822 ++__base::size();
1832 size_type __pos = __p - __base::begin();
1833 size_type __to_end = __base::size() - __pos;
1834 allocator_type& __a = __base::__alloc();
1842 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), __v);
1843 --__base::__start_;
1844 ++__base::size();
1849 iterator __b = __base::begin();
1854 --__base::__start_;
1855 ++__base::size();
1866 size_type __de = __base::size() - __pos;
1869 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), __v);
1870 ++__base::size();
1875 iterator __e = __base::end();
1880 ++__base::size();
1886 return __base::begin() + __pos;
1895 size_type __pos = __p - __base::begin();
1896 size_type __to_end = __base::size() - __pos;
1897 allocator_type& __a = __base::__alloc();
1905 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::move(__v));
1906 --__base::__start_;
1907 ++__base::size();
1911 iterator __b = __base::begin();
1914 --__base::__start_;
1915 ++__base::size();
1926 size_type __de = __base::size() - __pos;
1929 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::move(__v));
1930 ++__base::size();
1934 iterator __e = __base::end();
1937 ++__base::size();
1943 return __base::begin() + __pos;
1953 size_type __pos = __p - __base::begin();
1954 size_type __to_end = __base::size() - __pos;
1955 allocator_type& __a = __base::__alloc();
1963 __alloc_traits::construct(__a, _VSTD::addressof(*--__base::begin()), _VSTD::forward<_Args>(__args)...);
1964 --__base::__start_;
1965 ++__base::size();
1970 iterator __b = __base::begin();
1973 --__base::__start_;
1974 ++__base::size();
1985 size_type __de = __base::size() - __pos;
1988 __alloc_traits::construct(__a, _VSTD::addressof(*__base::end()), _VSTD::forward<_Args>(__args)...);
1989 ++__base::size();
1994 iterator __e = __base::end();
1997 ++__base::size();
2003 return __base::begin() + __pos;
2013 size_type __pos = __p - __base::begin();
2014 size_type __to_end = __base::size() - __pos;
2015 allocator_type& __a = __base::__alloc();
2022 iterator __old_begin = __base::begin();
2026 for (size_type __m = __n - __pos; __m; --__m, --__base::__start_, ++__base::size())
2047 iterator __old_end = __base::end();
2049 size_type __de = __base::size() - __pos;
2052 for (size_type __m = __n - __de; __m; --__m, ++__i, ++__base::size())
2066 return __base::begin() + __pos;
2076 __split_buffer<value_type, allocator_type&> __buf(__base::__alloc());
2089 size_type __pos = __p - __base::begin();
2090 size_type __to_end = __base::size() - __pos;
2091 allocator_type& __a = __base::__alloc();
2098 iterator __old_begin = __base::begin();
2104 for (_BiIter __j = __m; __j != __f; --__base::__start_, ++__base::size())
2114 --__base::__start_;
2115 ++__base::size();
2129 iterator __old_end = __base::end();
2132 size_type __de = __base::size() - __pos;
2136 for (_BiIter __j = __m; __j != __l; ++__i, ++__j, ++__base::size())
2143 for (iterator __j = __oen; __j != __old_end; ++__i, ++__j, ++__base::size())
2150 return __base::begin() + __pos;
2171 allocator_type& __a = __base::__alloc();
2176 for (iterator __i = __base::end(); __f != __l; ++__i, ++__f, ++__base::size())
2184 allocator_type& __a = __base::__alloc();
2189 for (iterator __i = __base::end(); __n; --__n, ++__i, ++__base::size())
2197 allocator_type& __a = __base::__alloc();
2202 for (iterator __i = __base::end(); __n; --__n, ++__i, ++__base::size())
2212 allocator_type& __a = __base::__alloc();
2213 if (__back_spare() >= __base::__block_size)
2215 __base::__start_ += __base::__block_size;
2216 pointer __pt = __base::__map_.back();
2217 __base::__map_.pop_back();
2218 __base::__map_.push_front(__pt);
2220 // Else if __base::__map_.size() < __base::__map_.capacity() then we need to allocate 1 buffer
2221 else if (__base::__map_.size() < __base::__map_.capacity())
2225 if (__base::__map_.__front_spare() > 0)
2226 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2229 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2231 pointer __pt = __base::__map_.back();
2232 __base::__map_.pop_back();
2233 __base::__map_.push_front(__pt);
2235 __base::__start_ = __base::__map_.size() == 1 ?
2236 __base::__block_size / 2 :
2237 __base::__start_ + __base::__block_size;
2242 __split_buffer<pointer, typename __base::__pointer_allocator&>
2243 __buf(max<size_type>(2 * __base::__map_.capacity(), 1),
2244 0, __base::__map_.__alloc());
2249 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2254 __alloc_traits::deallocate(__a, __buf.front(), __base::__block_size);
2258 for (typename __base::__map_pointer __i = __base::__map_.begin();
2259 __i != __base::__map_.end(); ++__i)
2261 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2262 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2263 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2264 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2265 __base::__start_ = __base::__map_.size() == 1 ?
2266 __base::__block_size / 2 :
2267 __base::__start_ + __base::__block_size;
2277 allocator_type& __a = __base::__alloc();
2278 size_type __nb = __recommend_blocks(__n + __base::__map_.empty());
2280 size_type __back_capacity = __back_spare() / __base::__block_size;
2286 __base::__start_ += __base::__block_size * __back_capacity;
2289 pointer __pt = __base::__map_.back();
2290 __base::__map_.pop_back();
2291 __base::__map_.push_front(__pt);
2295 else if (__nb <= __base::__map_.capacity() - __base::__map_.size())
2299 for (; __nb > 0; --__nb, __base::__start_ += __base::__block_size - (__base::__map_.size() == 1))
2301 if (__base::__map_.__front_spare() == 0)
2303 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2306 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2308 __base::__start_ += __back_capacity * __base::__block_size;
2311 pointer __pt = __base::__map_.back();
2312 __base::__map_.pop_back();
2313 __base::__map_.push_front(__pt);
2319 size_type __ds = (__nb + __back_capacity) * __base::__block_size - __base::__map_.empty();
2320 __split_buffer<pointer, typename __base::__pointer_allocator&>
2321 __buf(max<size_type>(2* __base::__map_.capacity(),
2322 __nb + __base::__map_.size()),
2323 0, __base::__map_.__alloc());
2329 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2334 for (typename __base::__map_pointer __i = __buf.begin();
2336 __alloc_traits::deallocate(__a, *__i, __base::__block_size);
2342 __buf.push_back(__base::__map_.back());
2343 __base::__map_.pop_back();
2345 for (typename __base::__map_pointer __i = __base::__map_.begin();
2346 __i != __base::__map_.end(); ++__i)
2348 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2349 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2350 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2351 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2352 __base::__start_ += __ds;
2362 allocator_type& __a = __base::__alloc();
2363 if (__front_spare() >= __base::__block_size)
2365 __base::__start_ -= __base::__block_size;
2366 pointer __pt = __base::__map_.front();
2367 __base::__map_.pop_front();
2368 __base::__map_.push_back(__pt);
2371 else if (__base::__map_.size() < __base::__map_.capacity())
2375 if (__base::__map_.__back_spare() != 0)
2376 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2379 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2381 pointer __pt = __base::__map_.front();
2382 __base::__map_.pop_front();
2383 __base::__map_.push_back(__pt);
2389 __split_buffer<pointer, typename __base::__pointer_allocator&>
2390 __buf(max<size_type>(2* __base::__map_.capacity(), 1),
2391 __base::__map_.size(),
2392 __base::__map_.__alloc());
2397 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2402 __alloc_traits::deallocate(__a, __buf.back(), __base::__block_size);
2406 for (typename __base::__map_pointer __i = __base::__map_.end();
2407 __i != __base::__map_.begin();)
2409 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2410 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2411 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2412 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2422 allocator_type& __a = __base::__alloc();
2423 size_type __nb = __recommend_blocks(__n + __base::__map_.empty());
2425 size_type __front_capacity = __front_spare() / __base::__block_size;
2431 __base::__start_ -= __base::__block_size * __front_capacity;
2434 pointer __pt = __base::__map_.front();
2435 __base::__map_.pop_front();
2436 __base::__map_.push_back(__pt);
2440 else if (__nb <= __base::__map_.capacity() - __base::__map_.size())
2446 if (__base::__map_.__back_spare() == 0)
2448 __base::__map_.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2450 for (; __nb > 0; --__nb, ++__front_capacity, __base::__start_ +=
2451 __base::__block_size - (__base::__map_.size() == 1))
2452 __base::__map_.push_front(__alloc_traits::allocate(__a, __base::__block_size));
2454 __base::__start_ -= __base::__block_size * __front_capacity;
2457 pointer __pt = __base::__map_.front();
2458 __base::__map_.pop_front();
2459 __base::__map_.push_back(__pt);
2465 size_type __ds = __front_capacity * __base::__block_size;
2466 __split_buffer<pointer, typename __base::__pointer_allocator&>
2467 __buf(max<size_type>(2* __base::__map_.capacity(),
2468 __nb + __base::__map_.size()),
2469 __base::__map_.size() - __front_capacity,
2470 __base::__map_.__alloc());
2476 __buf.push_back(__alloc_traits::allocate(__a, __base::__block_size));
2481 for (typename __base::__map_pointer __i = __buf.begin();
2483 __alloc_traits::deallocate(__a, *__i, __base::__block_size);
2489 __buf.push_back(__base::__map_.front());
2490 __base::__map_.pop_front();
2492 for (typename __base::__map_pointer __i = __base::__map_.end();
2493 __i != __base::__map_.begin();)
2495 _VSTD::swap(__base::__map_.__first_, __buf.__first_);
2496 _VSTD::swap(__base::__map_.__begin_, __buf.__begin_);
2497 _VSTD::swap(__base::__map_.__end_, __buf.__end_);
2498 _VSTD::swap(__base::__map_.__end_cap(), __buf.__end_cap());
2499 __base::__start_ -= __ds;
2507 allocator_type& __a = __base::__alloc();
2508 __alloc_traits::destroy(__a, *(__base::__map_.begin() +
2509 __base::__start_ / __base::__block_size) +
2510 __base::__start_ % __base::__block_size);
2511 --__base::size();
2512 if (++__base::__start_ >= 2 * __base::__block_size)
2514 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2515 __base::__map_.pop_front();
2516 __base::__start_ -= __base::__block_size;
2524 allocator_type& __a = __base::__alloc();
2525 size_type __p = __base::size() + __base::__start_ - 1;
2526 __alloc_traits::destroy(__a, *(__base::__map_.begin() +
2527 __p / __base::__block_size) +
2528 __p % __base::__block_size);
2529 --__base::size();
2530 if (__back_spare() >= 2 * __base::__block_size)
2532 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2533 __base::__map_.pop_back();
2551 pointer __fe = *__f.__m_iter_ + __base::__block_size;
2605 allocator_type& __a = __base::__alloc();
2607 // for (; __f != __l; ++__r, ++__f, ++__base::size())
2613 pointer __fe = *__f.__m_iter_ + __base::__block_size;
2622 for (; __fb != __fe; ++__fb, ++__r, ++__base::size())
2636 allocator_type& __a = __base::__alloc();
2641 // --__base::__start_;
2642 // ++__base::size();
2661 --__base::__start_;
2662 ++__base::size();
2674 iterator __b = __base::begin();
2677 allocator_type& __a = __base::__alloc();
2678 if (__pos < (__base::size() - 1) / 2)
2682 --__base::size();
2683 ++__base::__start_;
2684 if (__front_spare() >= 2 * __base::__block_size)
2686 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2687 __base::__map_.pop_front();
2688 __base::__start_ -= __base::__block_size;
2693 iterator __i = _VSTD::move(_VSTD::next(__p), __base::end(), __p);
2695 --__base::size();
2696 if (__back_spare() >= 2 * __base::__block_size)
2698 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2699 __base::__map_.pop_back();
2702 return __base::begin() + __pos;
2710 iterator __b = __base::begin();
2715 allocator_type& __a = __base::__alloc();
2716 if (__pos < (__base::size() - __n) / 2)
2721 __base::size() -= __n;
2722 __base::__start_ += __n;
2723 while (__front_spare() >= 2 * __base::__block_size)
2725 __alloc_traits::deallocate(__a, __base::__map_.front(), __base::__block_size);
2726 __base::__map_.pop_front();
2727 __base::__start_ -= __base::__block_size;
2732 iterator __i = _VSTD::move(__p + __n, __base::end(), __p);
2733 for (iterator __e = __base::end(); __i != __e; ++__i)
2735 __base::size() -= __n;
2736 while (__back_spare() >= 2 * __base::__block_size)
2738 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2739 __base::__map_.pop_back();
2743 return __base::begin() + __pos;
2750 iterator __e = __base::end();
2754 allocator_type& __a = __base::__alloc();
2755 iterator __b = __base::begin();
2759 __base::size() -= __n;
2760 while (__back_spare() >= 2 * __base::__block_size)
2762 __alloc_traits::deallocate(__a, __base::__map_.back(), __base::__block_size);
2763 __base::__map_.pop_back();
2775 __base::swap(__c);
2783 __base::clear();
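
Nearly every element-access line in the listing above (source lines 1666-1734, and again in the pop_front path at 2508-2510) reduces to the same arithmetic: a logical index __i is offset by __base::__start_, the quotient by __base::__block_size selects a block out of __base::__map_, and the remainder selects the slot inside that block. The capacity/spare helpers at 1347-1362 follow from the same fixed-block layout. The standalone sketch below reproduces only that index mapping; MiniDeque, blocks, start and kBlockSize are invented names for illustration and are not libc++ internals.

#include <cassert>
#include <cstddef>
#include <vector>

namespace sketch {

// Illustrative block size; libc++ derives __block_size from the element type.
constexpr std::size_t kBlockSize = 4;

struct MiniDeque {
    std::vector<std::vector<int>> blocks; // the "map" of fixed-size blocks
    std::size_t start = 0;                // index of the first element, like __start_
    std::size_t count = 0;                // number of stored elements

    int& operator[](std::size_t i) {
        std::size_t p = start + i;                    // __p = __base::__start_ + __i
        return blocks[p / kBlockSize][p % kBlockSize]; // block, then offset within it
    }
};

} // namespace sketch

int main() {
    sketch::MiniDeque d;
    d.blocks = {{0, 0, 7, 8}, {9, 10, 0, 0}}; // two blocks, data starts mid-block
    d.start = 2;                              // first element lives at blocks[0][2]
    d.count = 4;
    assert(d[0] == 7 && d[3] == 10);          // indexing crosses the block boundary transparently
    return 0;
}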