// Copyright 2017 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
#define BASE_CONTAINERS_CIRCULAR_DEQUE_H_

#include <algorithm>
#include <cstddef>
#include <iterator>
#include <type_traits>
#include <utility>

#include "base/check.h"
#include "base/containers/span.h"
#include "base/containers/vector_buffer.h"
#include "base/dcheck_is_on.h"
#include "base/memory/raw_ptr_exclusion.h"
#include "base/numerics/checked_math.h"
#include "base/numerics/safe_conversions.h"
#include "base/ranges/algorithm.h"
#include "base/ranges/from_range.h"

#if DCHECK_IS_ON()
#include <ostream>
#endif

// base::circular_deque is similar to std::deque. Unlike std::deque, the
// storage is provided in a flat circular buffer conceptually similar to a
// vector. The beginning and end will wrap around as necessary so that
// pushes and pops will be constant time as long as a capacity expansion is
// not required.
//
// The API should be identical to std::deque with the following differences:
//
//  - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
//    iterators.
//
//  - Insertions may resize the vector and so are not constant time (std::deque
//    guarantees constant time for insertions at the ends).
//
//  - Container-wide comparisons are not implemented. If you want to compare
//    two containers, use an algorithm so the expensive iteration is explicit.
//
// If you want a similar container with only a queue API, use base::queue in
// base/containers/queue.h.
//
// Constructors:
//   circular_deque();
//   circular_deque(size_t count);
//   circular_deque(size_t count, const T& value);
//   circular_deque(InputIterator first, InputIterator last);
//   circular_deque(base::from_range_t, Range range);
//   circular_deque(const circular_deque&);
//   circular_deque(circular_deque&&);
//   circular_deque(std::initializer_list<value_type>);
//
// Assignment functions:
//   circular_deque& operator=(const circular_deque&);
//   circular_deque& operator=(circular_deque&&);
//   circular_deque& operator=(std::initializer_list<T>);
//   void assign(size_t count, const T& value);
//   void assign(InputIterator first, InputIterator last);
//   void assign(std::initializer_list<T> value);
//   void assign_range(Range range);
//
// Random accessors:
//   T& at(size_t);
//   const T& at(size_t) const;
//   T& operator[](size_t);
//   const T& operator[](size_t) const;
//
// End accessors:
//   T& front();
//   const T& front() const;
//   T& back();
//   const T& back() const;
//
// Iterator functions:
//   iterator               begin();
//   const_iterator         begin() const;
//   const_iterator         cbegin() const;
//   iterator               end();
//   const_iterator         end() const;
//   const_iterator         cend() const;
//   reverse_iterator       rbegin();
//   const_reverse_iterator rbegin() const;
//   const_reverse_iterator crbegin() const;
//   reverse_iterator       rend();
//   const_reverse_iterator rend() const;
//   const_reverse_iterator crend() const;
//
// Memory management:
//   void reserve(size_t);  // SEE IMPLEMENTATION FOR SOME GOTCHAS.
//   size_t capacity() const;
//   void shrink_to_fit();
//
// Size management:
//   void clear();
//   bool empty() const;
//   size_t size() const;
//   void resize(size_t);
//   void resize(size_t count, const T& value);
//
// Positional insert and erase:
//   void insert(const_iterator pos, size_type count, const T& value);
//   void insert(const_iterator pos,
//               InputIterator first, InputIterator last);
//   iterator insert(const_iterator pos, const T& value);
//   iterator insert(const_iterator pos, T&& value);
//   iterator emplace(const_iterator pos, Args&&... args);
//   iterator erase(const_iterator pos);
//   iterator erase(const_iterator first, const_iterator last);
//
// End insert and erase:
//   void push_front(const T&);
//   void push_front(T&&);
//   void push_back(const T&);
//   void push_back(T&&);
//   T& emplace_front(Args&&...);
//   T& emplace_back(Args&&...);
//   void pop_front();
//   void pop_back();
//
// General:
//   void swap(circular_deque&);
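//
// Usage (a minimal sketch built only from the API listed above; the values are
// illustrative):
// ```
// base::circular_deque<int> q;
// q.push_back(1);
// q.push_back(2);
// q.push_front(0);        // q is now {0, 1, 2}.
// int first = q.front();  // 0
// q.pop_front();          // q is now {1, 2}.
// // Container-wide comparisons are intentionally omitted; compare explicitly
// // with an algorithm so the O(n) walk is visible at the call site:
// base::circular_deque<int> r = {1, 2};
// bool same = std::ranges::equal(q, r);
// ```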

namespace base {

template <class T>
class circular_deque;

namespace internal {

// Start allocating nonempty buffers with this many entries. This is the
// external capacity so the internal buffer will be one larger (= 4) which is
// more even for the allocator. See the descriptions of internal vs. external
// capacity in the comment above the buffer_ variable below.
constexpr size_t kCircularBufferInitialCapacity = 3;

template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  circular_deque_const_iterator() = default;

  // Dereferencing.
  const T& operator*() const {
    CHECK_NE(index_, end_);
    CheckUnstableUsage();
    CheckValidIndex(index_);
    // SAFETY: Increment() and Decrement() and Add() operations ensure that
    // `index_` stays inside [begin_, end_] (while supporting wrap around for
    // the structure). This maintains that `index_` always points at a
    // valid position for the `buffer_`. We also CHECK above that `index_` is
    // not `end_`, making it a valid pointer to dereference.
    return UNSAFE_BUFFERS(buffer_[index_]);
  }
  const T* operator->() const {
    CHECK_NE(index_, end_);
    CheckUnstableUsage();
    CheckValidIndex(index_);
    // SAFETY: Increment() and Decrement() and Add() operations ensure that
    // `index_` stays inside [begin_, end_] while supporting wrap around for
    // the structure. This maintains that `index_` always points at a
    // valid position for the `buffer_`. We also CHECK above that `index_` is
    // not `end_`, making it a valid pointer to dereference.
    return &UNSAFE_BUFFERS(buffer_[index_]);
  }
  const value_type& operator[](difference_type i) const { return *(*this + i); }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }

  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return static_cast<std::ptrdiff_t>(lhs.OffsetFromBegin() -
                                       rhs.OffsetFromBegin());
  }

  // Comparisons.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend std::strong_ordering operator<=>(
      const circular_deque_const_iterator& lhs,
      const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    // The order is based on the position of the element in the circular_deque
    // rather than `index_` at which the element is stored in the ring buffer.
    return lhs.OffsetFromBegin() <=> rhs.OffsetFromBegin();
  }

 protected:
  friend class circular_deque<T>;

  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : buffer_(parent->buffer_.data()),
        cap_(parent->buffer_.capacity()),
        begin_(parent->begin_),
        end_(parent->end_),
        index_(index) {
    if (begin_ <= end_) {
      CHECK_GE(index_, begin_);
      CHECK_LE(index_, end_);
    } else if (index_ >= begin_) {
      CHECK(index_ < cap_);
    } else {
      CHECK(index_ <= end_);
    }
#if DCHECK_IS_ON()
    parent_deque_ = parent;
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
  size_t OffsetFromBegin() const {
    if (index_ >= begin_) {
      return index_ - begin_;  // On the same side as begin.
    }
    return cap_ - begin_ + index_;
  }

  // The size of the deque, i.e. the number of elements in it.
  size_t Size() const {
    if (begin_ <= end_) {
      return end_ - begin_;
    }
    return cap_ - begin_ + end_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    CheckValidIndex(index_);
    CHECK_NE(index_, end_);
    index_++;
    if (index_ == cap_) {
      index_ = 0u;
    }
  }
  void Decrement() {
    CheckUnstableUsage();
    CheckValidIndexOrEnd(index_);
    CHECK_NE(index_, begin_);
    if (index_ == 0u) {
      index_ = cap_ - 1u;
    } else {
      index_--;
    }
  }
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0) {
      CheckValidIndexOrEnd(index_);
    } else {
      CheckValidIndex(index_);
    }
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is zero, so it's important to check for
    // this case explicitly.
    if (delta == 0) {
      return;
    }

    const auto offset_from_begin =
        // The max allocation size is PTRDIFF_MAX, so this value can't be larger
        // than fits in ptrdiff_t.
        static_cast<difference_type>(OffsetFromBegin());
    const auto deque_size =
        // The max allocation size is PTRDIFF_MAX, so this value can't be larger
        // than fits in ptrdiff_t.
        static_cast<difference_type>(Size());
    if (delta >= 0) {
      // Check `offset_from_begin + delta <= deque_size` without overflowing.
      CHECK_LE(delta, deque_size - offset_from_begin);
    } else {
      // Check `offset_from_begin + delta >= 0` without overflowing. We avoid
      // negating a negative `delta` which can overflow. Instead negate the
      // positive number which cannot.
      CHECK_GE(delta, -offset_from_begin) << offset_from_begin;
    }
    const auto new_offset =
        // The above checks verify that `offset_from_begin + delta` is in the
        // range [0, deque_size] and does not overflow, so it also fits in
        // `size_t`.
        static_cast<size_t>(offset_from_begin + delta);
    index_ = (new_offset + begin_) % cap_;
  }

#if DCHECK_IS_ON()
  void CheckValidIndexOrEnd(size_t index) const {
    parent_deque_->CheckValidIndexOrEnd(index_);
  }
  void CheckValidIndex(size_t index) const {
    parent_deque_->CheckValidIndex(index_);
  }
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK_EQ(created_generation_, parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK_EQ(parent_deque_, other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK_EQ(created_generation_, other.created_generation_);
  }
#else
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
  void CheckValidIndexOrEnd(size_t index) const {}
  void CheckValidIndex(size_t index) const {}
#endif  // DCHECK_IS_ON()

  // `buffer_` is not a raw_ptr<...> for performance reasons: Usually
  // on-stack pointer, pointing back to the collection being iterated, owned by
  // object that iterates over it.  Additionally this is supported by the
  // analysis of sampling profiler data and tab_search:top100:2020.
  RAW_PTR_EXCLUSION const T* buffer_ = nullptr;

  size_t cap_ = 0u;
  size_t begin_ = 0u;
  size_t end_ = 0u;
  size_t index_ = 0u;

#if DCHECK_IS_ON()
  RAW_PTR_EXCLUSION const circular_deque<T>* parent_deque_ = nullptr;
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_ = 0u;
#endif  // DCHECK_IS_ON()
};

template <typename T>
class circular_deque_iterator : public circular_deque_const_iterator<T> {
  using base = circular_deque_const_iterator<T>;

 public:
  friend class circular_deque<T>;

  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = T*;
  using reference = T&;
  using iterator_category = std::random_access_iterator_tag;

  // Expose the base class' constructor.
  circular_deque_iterator() : circular_deque_const_iterator<T>() {}

  // Dereferencing.
  T& operator*() const { return const_cast<T&>(base::operator*()); }
  T* operator->() const { return const_cast<T*>(base::operator->()); }
  T& operator[](difference_type i) {
    return const_cast<T&>(base::operator[](i));
  }

  // Random access mutation.
  friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_iterator& operator+=(difference_type offset) {
    base::Add(offset);
    return *this;
  }
  friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_iterator& operator-=(difference_type offset) {
    base::Add(-offset);
    return *this;
  }

  // Increment and decrement.
  circular_deque_iterator& operator++() {
    base::Increment();
    return *this;
  }
  circular_deque_iterator operator++(int) {
    circular_deque_iterator ret = *this;
    base::Increment();
    return ret;
  }
  circular_deque_iterator& operator--() {
    base::Decrement();
    return *this;
  }
  circular_deque_iterator operator--(int) {
    circular_deque_iterator ret = *this;
    base::Decrement();
    return ret;
  }

 private:
  circular_deque_iterator(const circular_deque<T>* parent, size_t index)
      : circular_deque_const_iterator<T>(parent, index) {}
};

}  // namespace internal

template <typename T>
class circular_deque {
 private:
  using VectorBuffer = internal::VectorBuffer<T>;

 public:
  using value_type = T;
  using size_type = size_t;
  using difference_type = std::ptrdiff_t;
  using reference = value_type&;
  using const_reference = const value_type&;
  using pointer = value_type*;
  using const_pointer = const value_type*;

  using iterator = internal::circular_deque_iterator<T>;
  using const_iterator = internal::circular_deque_const_iterator<T>;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;

  // ---------------------------------------------------------------------------
  // Constructor

  // Constructs an empty deque.
  constexpr circular_deque() = default;

  // Constructs with `count` copies of a default-constructed T.
  explicit circular_deque(size_type count) { resize(count); }

  // Constructs with `count` copies of `value`.
  circular_deque(size_type count, const T& value) { resize(count, value); }

  // Constructs a deque by constructing its elements from each element in
  // `[first, last)`.
  //
  // Prefer using the `from_range_t` constructor, which builds a deque from a
  // range, instead of from problematic iterator pairs.
  //
  // # Safety
  // The `first` and `last` iterators must be from the same container, with
  // `first <= last`.
  template <class InputIterator>
    requires(std::input_iterator<InputIterator>)
  UNSAFE_BUFFER_USAGE circular_deque(InputIterator first, InputIterator last)
      : circular_deque() {
    // SAFETY: The caller is responsible for giving iterators from the same
    // container.
    UNSAFE_BUFFERS(assign(first, last));
  }

  // Constructs a deque from the elements in a range (a container or span),
  // typically by copy-constructing if the range also holds objects of type
  // `T`.
  //
  // Example:
  // ```
  // int values[] = {1, 3};
  // circular_deque<int> deq(base::from_range, values);
  // ```
  template <typename Range>
    requires(std::ranges::input_range<Range>)
  circular_deque(base::from_range_t, Range&& value) : circular_deque() {
    assign_range(std::forward<Range>(value));
  }

  // Copy/move.
  circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
    assign_range(other);
  }
  circular_deque(circular_deque&& other) noexcept
      : buffer_(std::move(other.buffer_)),
        begin_(std::exchange(other.begin_, 0u)),
        end_(std::exchange(other.end_, 0u)) {}

  circular_deque(std::initializer_list<value_type> init) { assign(init); }

  ~circular_deque() { DestructRange(begin_, end_); }

  // ---------------------------------------------------------------------------
  // Assignments.
  //
  // All of these may invalidate iterators and references.

  circular_deque& operator=(const circular_deque& other) {
    if (&other == this) {
      return *this;
    }

    reserve(other.size());
    assign_range(other);
    return *this;
  }
  circular_deque& operator=(circular_deque&& other) noexcept {
    if (&other == this) {
      return *this;
    }

    // We're about to overwrite the buffer, so don't free it in clear to
    // avoid doing it twice.
    ClearRetainCapacity();
    buffer_ = std::move(other.buffer_);
    begin_ = std::exchange(other.begin_, 0u);
    end_ = std::exchange(other.end_, 0u);
    IncrementGeneration();
    return *this;
  }
  circular_deque& operator=(std::initializer_list<value_type> ilist) {
    reserve(ilist.size());
    assign_range(ilist);
    return *this;
  }

  void assign(size_type count, const value_type& value) {
    ClearRetainCapacity();
    reserve(count);
    for (size_t i = 0; i < count; i++) {
      emplace_back(value);
    }
    IncrementGeneration();
  }

  // Constructs and appends new elements into the container from each element in
  // `[first, last)`, typically by copy-constructing if the iterators are also
  // over objects of type `T`.
  //
  // # Safety
  // Requires that `first` and `last` are valid iterators into a container, with
  // `first <= last`.
  template <typename InputIterator>
    requires(std::input_iterator<InputIterator>)
  UNSAFE_BUFFER_USAGE void assign(InputIterator first, InputIterator last) {
    // Possible future enhancement: dispatch on iterator tag type. For forward
    // iterators we can use std::distance to preallocate the space required
    // and only do one copy.
    ClearRetainCapacity();
    while (first != last) {
      emplace_back(*first);
      // SAFETY: Pointers are iterators, so `first` may be a pointer. We require
      // the caller to provide valid pointers such that `last` is for the same
      // allocation and `first <= last`, and we've checked in the loop condition
      // that `first != last` so incrementing will stay a valid pointer for the
      // allocation.
      UNSAFE_BUFFERS(++first);
    }
    IncrementGeneration();
  }

  // Copies and appends new elements into the container from each element in
  // the initializer list.
  void assign(std::initializer_list<value_type> value) { assign_range(value); }

  // Constructs and appends new elements into the container from each element in
  // a range (a container or span), typically by copy-constructing if
  // the range also holds objects of type `T`.
  template <typename Range>
    requires(std::ranges::input_range<Range>)
  void assign_range(Range&& range) {
    reserve(std::ranges::distance(range));
    // SAFETY: begin() and end() produce iterators from the same container with
    // begin <= end.
    UNSAFE_BUFFERS(assign(std::ranges::begin(range), std::ranges::end(range)));
  }

  // ---------------------------------------------------------------------------
  // Accessors.
  //
  // Since this class assumes no exceptions, at() and operator[] are equivalent.

  const value_type& at(size_type i) const {
    CHECK_LT(i, size());
    size_t right_size = buffer_.capacity() - begin_;
    if (begin_ <= end_ || i < right_size) {
      return buffer_[begin_ + i];
    }
    return buffer_[i - right_size];
  }
  value_type& at(size_type i) {
    return const_cast<value_type&>(std::as_const(*this).at(i));
  }

  const value_type& operator[](size_type i) const { return at(i); }
  value_type& operator[](size_type i) { return at(i); }

  value_type& front() {
    CHECK(!empty());
    return buffer_[begin_];
  }
  const value_type& front() const {
    CHECK(!empty());
    return buffer_[begin_];
  }

  value_type& back() {
    CHECK(!empty());
    return *(end() - 1);
  }
  const value_type& back() const {
    CHECK(!empty());
    return *(end() - 1);
  }

  // ---------------------------------------------------------------------------
  // Iterators.

  iterator begin() { return iterator(this, begin_); }
  const_iterator begin() const { return const_iterator(this, begin_); }
  const_iterator cbegin() const { return const_iterator(this, begin_); }

  iterator end() { return iterator(this, end_); }
  const_iterator end() const { return const_iterator(this, end_); }
  const_iterator cend() const { return const_iterator(this, end_); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  const_reverse_iterator crbegin() const { return rbegin(); }

  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }
  const_reverse_iterator crend() const { return rend(); }

  // ---------------------------------------------------------------------------
  // Memory management.

  // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
  // the buffer when elements are deleted and there is "too much" wasted space.
  // So if you call reserve() with a large size in anticipation of pushing many
  // elements, but pop an element before the queue is full, the capacity you
  // reserved may be lost.
  //
  // As a result, it's only worthwhile to call reserve() when you're adding
  // many things at once with no intermediate operations.
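  //
  // For example, a minimal sketch of the intended pattern (the element type
  // and counts are illustrative):
  // ```
  // base::circular_deque<int> q;
  // q.reserve(100u);
  // for (int i = 0; i < 100; ++i) {
  //   q.push_back(i);  // No intermediate pops, so the reserved capacity
  // }                  // is not silently shrunk away.
  // ```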
  void reserve(size_type new_capacity) {
    if (new_capacity > capacity()) {
      SetCapacityTo(new_capacity);
    }
  }

  size_type capacity() const {
    // One item is wasted to indicate end().
    return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
  }

  void shrink_to_fit() {
    if (empty()) {
      // Optimize empty case to really delete everything if there was
      // something.
      if (buffer_.capacity()) {
        buffer_ = VectorBuffer();
      }
    } else {
      SetCapacityTo(size());
    }
  }

  // ---------------------------------------------------------------------------
  // Size management.

  // This will additionally reset the capacity() to 0.
  void clear() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    ClearRetainCapacity();
    buffer_ = VectorBuffer();
  }

  bool empty() const { return begin_ == end_; }

  size_type size() const {
    if (begin_ <= end_) {
      return end_ - begin_;
    }
    return buffer_.capacity() - begin_ + end_;
  }

  // When reducing size, the elements are deleted from the end. When expanding
  // size, elements are added to the end with |value| or the default
  // constructed version. Even when using resize(count) to shrink, a default
  // constructor is required for the code to compile, even though it will not
  // be called.
  //
  // There are two versions rather than using a default value to avoid
  // creating a temporary when shrinking (when it's not needed). Also, when
  // expanding with default-constructed elements, calling the constructor for
  // each element is usually faster than making a default-constructed temporary
  // and copying it.
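  //
  // For example (illustrative values only):
  // ```
  // base::circular_deque<int> q = {1, 2};  // {1, 2}
  // q.resize(4u, 7);                       // {1, 2, 7, 7} - grows at the end.
  // q.resize(1u);                          // {1}          - shrinks from the end.
  // ```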
  void resize(size_type count) {
    // SEE BELOW VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      // This could be slightly more efficient but expanding a queue with
      // identical elements is unusual and the extra computations of emplacing
      // one-by-one will typically be small relative to calling the constructor
      // for every item.
      ExpandCapacityIfNecessary(count - size());
      while (size() < count) {
        emplace_back();
      }
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
  void resize(size_type count, const value_type& value) {
    // SEE ABOVE VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      ExpandCapacityIfNecessary(count - size());
      while (size() < count) {
        emplace_back(value);
      }
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // Insert and erase.
  //
  // Insertion and deletion in the middle is O(n) and invalidates all existing
  // iterators.
  //
  // The implementation of insert isn't optimized as much as it could be. If
  // the insertion requires that the buffer be grown, it will first be grown
  // and everything moved, and then the items will be inserted, potentially
  // moving some items twice. This simplifies the implementation substantially
  // and means less generated templatized code. Since this is an uncommon
  // operation for deques, and already relatively slow, it doesn't seem worth
  // the benefit to optimize this.
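  //
  // For example, existing iterators must be refreshed after an insert (a
  // minimal sketch; the values are illustrative):
  // ```
  // base::circular_deque<int> q = {1, 2, 4};
  // auto it = q.begin() + 2;  // Points at 4.
  // it = q.insert(it, 3);     // q is {1, 2, 3, 4}; older iterators are now
  //                           // invalid, so use the returned one instead.
  // ```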

  void insert(const_iterator pos, size_type count, const T& value) {
    ValidateIterator(pos);

    // Optimize insert at the beginning.
    if (pos == begin()) {
      ExpandCapacityIfNecessary(count);
      for (size_t i = 0; i < count; i++) {
        push_front(value);
      }
      return;
    }

    CHECK_LT(pos.index_, buffer_.capacity());
    iterator insert_cur(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(count, &insert_cur, &insert_end);
    while (insert_cur < insert_end) {
      std::construct_at(buffer_.get_at(insert_cur.index_), value);
      ++insert_cur;
    }

    IncrementGeneration();
  }

  template <class InputIterator>
    requires(std::input_iterator<InputIterator>)
  void insert(const_iterator pos, InputIterator first, InputIterator last) {
    ValidateIterator(pos);

    const size_t inserted_items =
        checked_cast<size_t>(std::distance(first, last));
    if (inserted_items == 0u) {
      return;  // Can divide by 0 when doing modulo below, so return early.
    }

    // Make a hole to copy the items into.
    iterator insert_cur;
    iterator insert_end;
    if (pos == begin()) {
      // Optimize insert at the beginning, nothing needs to be shifted and the
      // hole is the |inserted_items| block immediately before |begin_|.
      ExpandCapacityIfNecessary(inserted_items);
      const size_t old_begin = begin_;
      begin_ = (old_begin + buffer_.capacity() - inserted_items) %
               buffer_.capacity();
      insert_cur = begin();
      insert_end = iterator(this, old_begin);
    } else {
      CHECK_LT(pos.index_, buffer_.capacity());
      insert_cur = iterator(this, pos.index_);
      MakeRoomFor(inserted_items, &insert_cur, &insert_end);
    }

    // Copy the items.
    while (insert_cur < insert_end) {
      std::construct_at(buffer_.get_at(insert_cur.index_), *first);
      ++insert_cur;
      // SAFETY: The input iterator may be a pointer, in which case we will
      // produce UB if `first` is incremented past `last`. We use checked_cast
      // of std::distance to an unsigned value above, which ensures that `last
      // >= first`. Then we need that `insert_end - insert_cur <= last - first`:
      // - If inserting at the start, pos == begin() and `insert_cur` is
      //   positioned at `begin_ - (last - first)`, and `insert_end` is
      //   positioned at `begin_` so we have
      //   `insert_end - insert_cur == last - first`.
      // - If inserting elsewhere, `MakeRoomFor(last - first, ...)` returns an
      //   iterator pair with distance of `last - first`, so we have
      //   `insert_end - insert_cur == last - first`.
      UNSAFE_BUFFERS(++first);
    }

    IncrementGeneration();
  }

  // These all return an iterator to the inserted item. Existing iterators will
  // be invalidated.
  iterator insert(const_iterator pos, const T& value) {
    return emplace(pos, value);
  }
  iterator insert(const_iterator pos, T&& value) {
    return emplace(pos, std::move(value));
  }
  template <class... Args>
  iterator emplace(const_iterator pos, Args&&... args) {
    ValidateIterator(pos);

    // Optimize insert at beginning which doesn't require shifting.
    if (pos == cbegin()) {
      emplace_front(std::forward<Args>(args)...);
      return begin();
    }

    // Do this before we make the new iterators we return.
    IncrementGeneration();

    CHECK_LT(pos.index_, buffer_.capacity());
    iterator insert_begin(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(1, &insert_begin, &insert_end);
    std::construct_at(buffer_.get_at(insert_begin.index_),
                      std::forward<Args>(args)...);

    return insert_begin;
  }

  // Calling erase() won't automatically resize the buffer smaller like resize
  // or the pop functions. Erase is slow and relatively uncommon, and for
  // normal deque usage a pop will normally be done on a regular basis that
  // will prevent excessive buffer usage over long periods of time. It's not
  // worth having the extra code for every template instantiation of erase()
  // to resize capacity downward to a new buffer.
  iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  iterator erase(const_iterator pos_begin, const_iterator pos_end) {
    ValidateIterator(pos_begin);
    ValidateIterator(pos_end);

    IncrementGeneration();

    if (pos_begin.index_ == pos_end.index_) {
      // Nothing deleted. Need to return early to avoid falling through to
      // moving items on top of themselves.
      return iterator(this, pos_begin.index_);
    }

    // First, call the destructor on the deleted items.
    DestructRange(pos_begin.index_, pos_end.index_);

    if (pos_begin.index_ == begin_) {
      // This deletion is from the beginning. Nothing needs to be copied, only
      // begin_ needs to be updated.
      begin_ = pos_end.index_;
      return iterator(this, pos_end.index_);
    }

    // In an erase operation, the shifted items all move logically to the left,
    // so move them from left-to-right.
    //
    // The elements are being moved to memory where the T objects were
    // previously destroyed.
    //
    // TODO(danakj): We could skip destruction and do MoveAssignRange here, for
    // the elements that are being replaced.
    size_t move_src = pos_end.index_;
    const size_t move_src_end = end_;
    size_t move_dest = pos_begin.index_;
    const size_t cap = buffer_.capacity();
    while (move_src != move_src_end) {
      VectorBuffer::MoveConstructRange(buffer_.subspan(move_src, 1u),
                                       buffer_.subspan(move_dest, 1u));
      move_src = (move_src + 1u) % cap;
      move_dest = (move_dest + 1u) % cap;
    }

    end_ = move_dest;

    // Since we did not reallocate and only changed things after the erase
    // element(s), the input iterator's index points to the thing following the
    // deletion.
    return iterator(this, pos_begin.index_);
  }

  // ---------------------------------------------------------------------------
  // Begin/end operations.

  void push_front(const T& value) { emplace_front(value); }
  void push_front(T&& value) { emplace_front(std::move(value)); }

  void push_back(const T& value) { emplace_back(value); }
  void push_back(T&& value) { emplace_back(std::move(value)); }

  template <class... Args>
  reference emplace_front(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    if (begin_ == 0) {
      begin_ = buffer_.capacity() - 1;
    } else {
      begin_--;
    }
    IncrementGeneration();
    std::construct_at(buffer_.get_at(begin_), std::forward<Args>(args)...);
    return front();
  }

  template <class... Args>
  reference emplace_back(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    std::construct_at(buffer_.get_at(end_), std::forward<Args>(args)...);
    if (end_ == buffer_.capacity() - 1) {
      end_ = 0;
    } else {
      end_++;
    }
    IncrementGeneration();
    return back();
  }

  void pop_front() {
    CHECK(!empty());
    DestructRange(begin_, begin_ + 1u);
    begin_++;
    if (begin_ == buffer_.capacity()) {
      begin_ = 0;
    }

    ShrinkCapacityIfNecessary();

    // Technically popping will not invalidate any iterators since the
    // underlying buffer will be stable. But in the future we may want to add a
    // feature that resizes the buffer smaller if there is too much wasted
    // space. This ensures we can make such a change safely.
    IncrementGeneration();
  }
  void pop_back() {
    CHECK(!empty());
    if (end_ == 0) {
      end_ = buffer_.capacity() - 1;
    } else {
      end_--;
    }
    DestructRange(end_, end_ + 1u);

    ShrinkCapacityIfNecessary();

    // See pop_front comment about why this is here.
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // General operations.

  void swap(circular_deque& other) {
    std::swap(buffer_, other.buffer_);
    std::swap(begin_, other.begin_);
    std::swap(end_, other.end_);
    IncrementGeneration();
  }

  friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }

 private:
  friend internal::circular_deque_iterator<T>;
  friend internal::circular_deque_const_iterator<T>;

  // Moves the items in the given circular buffer to the current one. The source
  // is moved from so will become invalid. The destination buffer must have
  // already been allocated with enough size.
  //
  // # Safety
  // `from_begin` and `from_end` must be less-than and less-than-or-equal-to the
  // capacity of `from_buf` respectively, with `from_begin <= from_end`, or
  // Undefined Behaviour may result.
  UNSAFE_BUFFER_USAGE static void MoveBuffer(VectorBuffer& from_buf,
                                             size_t from_begin,
                                             size_t from_end,
                                             VectorBuffer& to_buf,
                                             size_t* to_begin,
                                             size_t* to_end) {
    *to_begin = 0;
    if (from_begin < from_end) {
      // Contiguous.
      VectorBuffer::MoveConstructRange(
          from_buf.subspan(from_begin, from_end - from_begin),
          to_buf.subspan(0u, from_end - from_begin));
      *to_end = from_end - from_begin;
    } else if (from_begin > from_end) {
      // Discontiguous, copy the right side to the beginning of the new buffer.
      span<T> right_side = from_buf.subspan(from_begin);
      VectorBuffer::MoveConstructRange(right_side,
                                       to_buf.subspan(0u, right_side.size()));
      // Append the left side.
      span<T> left_side = from_buf.subspan(0u, from_end);
      VectorBuffer::MoveConstructRange(
          left_side, to_buf.subspan(right_side.size(), left_side.size()));
      *to_end = left_side.size() + right_side.size();
    } else {
      // No items.
      *to_end = 0;
    }
  }

  // Expands the buffer size. This assumes the size is larger than the
  // number of elements in the vector (it won't call delete on anything).
  void SetCapacityTo(size_t new_capacity) {
    // Use the capacity + 1 as the internal buffer size to differentiate
    // empty and full (see definition of buffer_ below).
    VectorBuffer new_buffer(new_capacity + 1u);
    // SAFETY: This class maintains an invariant that `begin_` and `end_` are
    // less than `buffer_`'s capacity.
    UNSAFE_BUFFERS(
        MoveBuffer(buffer_, begin_, end_, new_buffer, &begin_, &end_));
    buffer_ = std::move(new_buffer);
  }
  void ExpandCapacityIfNecessary(size_t additional_elts) {
    const size_t cur_size = size();
    const size_t cur_capacity = capacity();

    // Protect against overflow when adding `additional_elts`, and exceeding the
    // max allocation size.
    CHECK_LE(additional_elts, PTRDIFF_MAX - cur_size);

    size_t min_new_capacity = cur_size + additional_elts;
    if (cur_capacity >= min_new_capacity) {
      return;  // Already enough room.
    }

    min_new_capacity =
        std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);

    // std::vector always grows by at least 50%. WTF::Deque grows by at least
    // 25%. We expect queue workloads to generally stay at a similar size and
    // grow less than a vector might, so use 25%.
    SetCapacityTo(std::max(min_new_capacity, cur_capacity + cur_capacity / 4u));
  }

  void ShrinkCapacityIfNecessary() {
    // Don't auto-shrink below this size.
    if (capacity() <= internal::kCircularBufferInitialCapacity) {
      return;
    }

    // Shrink when 100% of the size() is wasted.
    size_t sz = size();
    size_t empty_spaces = capacity() - sz;
    if (empty_spaces < sz) {
      return;
    }

    // Leave 1/4 the size as free capacity, not going below the initial
    // capacity.
    size_t new_capacity =
        std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
    if (new_capacity < capacity()) {
      // Count extra item to convert to internal capacity.
      SetCapacityTo(new_capacity);
    }
  }

  // Backend for clear() but does not resize the internal buffer.
  void ClearRetainCapacity() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.

    // SAFETY: This class maintains an invariant that `begin_` and `end_` are
    // less than `buffer_`'s capacity, so the range passed to DestructRange()
    // is valid.
    DestructRange(begin_, end_);
    begin_ = 0;
    end_ = 0;
    IncrementGeneration();
  }

  // Calls destructors for the given begin->end indices. The indices may wrap
  // around. The buffer is not resized, and the begin_ and end_ members are
  // not changed.
  void DestructRange(size_t begin, size_t end) {
    if (end == begin) {
      return;
    } else if (end > begin) {
      VectorBuffer::DestructRange(buffer_.subspan(begin, end - begin));
    } else {
      VectorBuffer::DestructRange(buffer_.subspan(begin));
      VectorBuffer::DestructRange(buffer_.subspan(0u, end));
    }
  }

  // Makes room for |count| items starting at |*insert_begin|. Since iterators
  // are not stable across buffer resizes, |*insert_begin| will be updated to
  // point to the beginning of the newly opened position in the new array
  // (it's in/out), and |*insert_end| will be set to the end of the newly
  // opened position (it's out-only).
  void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
    if (count == 0) {
      *insert_end = *insert_begin;
      return;
    }

    // The offset from the beginning will be stable across reallocations.
    size_t begin_offset = insert_begin->OffsetFromBegin();
    ExpandCapacityIfNecessary(count);

    // Update the new end and prepare the iterators for copying. The newly
    // used space contains uninitialized memory.
    const size_t cap = buffer_.capacity();
    size_t src = end_;
    end_ = (end_ + count) % cap;
    size_t dest = end_;

    *insert_begin = iterator(this, (begin_ + begin_offset) % cap);
    *insert_end = iterator(this, (insert_begin->index_ + count) % cap);

    // Move the elements. This will always involve shifting logically to the
    // right, so move in a right-to-left order.
    while (true) {
      if (src == insert_begin->index_) {
        break;
      }
      src = (src + cap - 1u) % cap;
      dest = (dest + cap - 1u) % cap;
      VectorBuffer::MoveConstructRange(buffer_.subspan(src, 1u),
                                       buffer_.subspan(dest, 1u));
    }
  }

#if DCHECK_IS_ON()
  // Asserts the given index is dereferenceable. The index is an index into the
  // buffer, not an index used by operator[] or at() which will be offsets from
  // begin.
  void CheckValidIndex(size_t i) const {
    if (begin_ <= end_) {
      DCHECK(i >= begin_ && i < end_);
    } else {
      DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
    }
  }

  // Asserts the given index is either dereferenceable or points to end().
  void CheckValidIndexOrEnd(size_t i) const {
    if (i != end_) {
      CheckValidIndex(i);
    }
  }

  void ValidateIterator(const const_iterator& i) const {
    DCHECK(i.parent_deque_ == this);
    i.CheckUnstableUsage();
  }

  // See generation_ below.
  void IncrementGeneration() { generation_++; }
#else
  // No-op versions of these functions for release builds.
  void CheckValidIndex(size_t) const {}
  void CheckValidIndexOrEnd(size_t) const {}
  void ValidateIterator(const const_iterator& i) const {}
  void IncrementGeneration() {}
#endif

  // Danger, the buffer_.capacity() is the "internal capacity" which is
  // capacity() + 1 since there is an extra item to indicate the end. Otherwise
  // being completely empty and completely full are indistinguishable (begin ==
  // end). We could add a separate flag to avoid it, but that adds significant
  // extra complexity since every computation will have to check for it. Always
  // keeping one extra unused element in the buffer makes iterator computations
  // much simpler.
  //
  // Container internal code will want to use buffer_.capacity() for offset
  // computations rather than capacity().
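  //
  // For example, a deque whose external capacity() is 3 uses an internal
  // buffer_.capacity() of 4: begin_ == end_ always means empty, and the deque
  // is full when exactly one internal slot remains unused.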
  VectorBuffer buffer_;
  size_type begin_ = 0;
  size_type end_ = 0;

#if DCHECK_IS_ON()
  // Incremented every time a modification is made that could affect iterator
  // invalidations.
  uint64_t generation_ = 0;
#endif
};

// Implementations of base::Erase[If] (see base/stl_util.h).
template <class T, class Value>
size_t Erase(circular_deque<T>& container, const Value& value) {
  auto it = ranges::remove(container, value);
  size_t removed = std::distance(it, container.end());
  container.erase(it, container.end());
  return removed;
}

template <class T, class Predicate>
size_t EraseIf(circular_deque<T>& container, Predicate pred) {
  auto it = ranges::remove_if(container, pred);
  size_t removed = std::distance(it, container.end());
  container.erase(it, container.end());
  return removed;
}
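
// Usage sketch for the erase helpers above (the values and the predicate are
// illustrative):
// ```
// base::circular_deque<int> q = {1, 2, 3, 4};
// size_t removed_twos = base::Erase(q, 2);  // Returns 1; q is now {1, 3, 4}.
// size_t removed_big =
//     base::EraseIf(q, [](int v) { return v > 2; });  // Returns 2; q is {1}.
// ```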

}  // namespace base

#endif  // BASE_CONTAINERS_CIRCULAR_DEQUE_H_