/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "perfetto/base/circular_queue.h"

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <new>
#include <random>
#include <utility>

#include "gtest/gtest.h"
22
namespace perfetto {
namespace base {
namespace {

// Basic int queue: growth from initial capacity 1, forward iteration,
// random-access iterator reads/writes, front erasure and a randomized
// fill/drain cycle driven by a deterministic PRNG.
TEST(CircularQueueTest, Int) {
  CircularQueue<int> queue(/*initial_capacity=*/1);
  ASSERT_EQ(queue.size(), 0u);
  queue.emplace_back(101);
  ASSERT_EQ(queue.size(), 1u);
  queue.emplace_back(102);
  queue.emplace_back(103);
  queue.emplace_back(104);
  ASSERT_EQ(queue.size(), 4u);

  // Forward iteration visits elements in insertion order.
  auto it = queue.begin();
  for (int i = 101; i <= 104; i++) {
    ASSERT_EQ(*it, i);
    it++;
  }
  ASSERT_EQ(it, queue.end());

  queue.erase_front(1);
  ASSERT_EQ(queue.size(), 3u);
  ASSERT_EQ(*queue.begin(), 102);

  // Iterators are random-access and writable.
  *(queue.begin() + 1) = 42;
  ASSERT_EQ(*(queue.end() - 2), 42);

  queue.erase_front(2);
  ASSERT_EQ(queue.size(), 1u);
  ASSERT_EQ(*queue.begin(), 104);

  queue.pop_front();
  ASSERT_EQ(queue.size(), 0u);
  ASSERT_EQ(queue.begin(), queue.end());

  const size_t kNumInts = 100000u;

  // Fill with a deterministic pseudo-random sequence, forcing many
  // capacity expansions.
  {
    std::minstd_rand0 rnd_engine(0);
    for (size_t i = 0; i < kNumInts; i++) {
      int n = static_cast<int>(rnd_engine());
      queue.emplace_back(n);
    }
  }
  ASSERT_EQ(queue.size(), kNumInts);
  ASSERT_EQ(static_cast<size_t>(queue.end() - queue.begin()), kNumInts);

  // Re-seed an identical engine and verify the contents element by element.
  {
    std::minstd_rand0 rnd_engine(0);
    it = queue.begin();
    for (size_t i = 0; i < kNumInts; ++i, ++it) {
      ASSERT_LT(it, queue.end());
      ASSERT_EQ(*it, static_cast<int>(rnd_engine()));
    }
  }

  // Drain the queue in randomly-sized chunks, checking the head each time.
  {
    std::minstd_rand0 del_rnd(42);
    std::minstd_rand0 rnd_engine(0);
    while (!queue.empty()) {
      ASSERT_EQ(*queue.begin(), static_cast<int>(rnd_engine()));
      size_t num_del = (del_rnd() % 8) + 1;
      queue.erase_front(num_del + 1);  // +1 because of the read 2 lines above.
      rnd_engine.discard(num_del);
    }
  }
}

// Checks that CircularQueue iterators satisfy the random-access iterator
// requirements of std::sort, including after the ring buffer has wrapped.
TEST(CircularQueueTest, Sorting) {
  CircularQueue<uint64_t> queue;
  std::minstd_rand0 rnd_engine(0);
  for (int i = 0; i < 100000; i++) {
    queue.emplace_back(static_cast<uint64_t>(rnd_engine()));
    // Periodic front erasures advance the logical begin, so the underlying
    // storage wraps around while filling.
    if ((i % 100) == 0)
      queue.erase_front(29);
  }
  ASSERT_FALSE(std::is_sorted(queue.begin(), queue.end()));
  std::sort(queue.begin(), queue.end());
  ASSERT_TRUE(std::is_sorted(queue.begin(), queue.end()));
}

// Move construction and move assignment must leave the source queue empty
// and immediately reusable, like a freshly constructed queue.
TEST(CircularQueueTest, MoveOperators) {
  CircularQueue<int> queue;
  queue.emplace_back(1);
  queue.emplace_back(2);

  {
    CircularQueue<int> moved(std::move(queue));
    ASSERT_TRUE(queue.empty());
    ASSERT_EQ(moved.size(), 2u);

    moved.emplace_back(3);
    moved.emplace_back(4);
    ASSERT_EQ(moved.size(), 4u);
  }

  // Deliberately reuse the moved-from queue: the tested contract is that it
  // behaves like an empty queue.
  queue.emplace_back(10);
  queue.emplace_back(11);
  queue.emplace_back(12);
  ASSERT_EQ(queue.size(), 3u);
  ASSERT_EQ(queue.front(), 10);
  ASSERT_EQ(queue.back(), 12);

  {
    CircularQueue<int> moved;
    moved.emplace_back(42);
    moved = std::move(queue);  // Replaces the pre-existing element.
    ASSERT_TRUE(queue.empty());
    ASSERT_EQ(moved.size(), 3u);
    ASSERT_EQ(moved.front(), 10);
    ASSERT_EQ(moved.back(), 12);
  }
}

// Validates iterator comparison operators and ordering while the internal
// write position wraps around buffers of various capacities.
TEST(CircularQueueTest, Iterators) {
  for (size_t repeat = 1; repeat < 8; repeat++) {
    size_t capacity = 8 * (1 << repeat);
    CircularQueue<size_t> queue(capacity);
    for (size_t i = 0; i < capacity - 2; i++)
      queue.emplace_back(0u);
    queue.erase_front(queue.size());
    ASSERT_TRUE(queue.empty());
    ASSERT_EQ(queue.capacity(), capacity);

    // Now the queue is empty and the internal write iterator is about to
    // wrap.

    // Add a bit more than half-capacity and check that the queue didn't
    // resize.
    for (size_t i = 0; i < capacity / 2 + 3; i++)
      queue.emplace_back(i);
    ASSERT_EQ(queue.capacity(), capacity);

    // Check that all iterators are consistent.
    auto begin = queue.begin();
    auto end = queue.end();
    auto mid = begin + std::distance(begin, end) / 2;
    ASSERT_TRUE(std::is_sorted(begin, end));
    ASSERT_TRUE(begin < end);
    ASSERT_TRUE(begin <= begin);
    ASSERT_TRUE(begin >= begin);
    ASSERT_FALSE(begin < begin);
    ASSERT_FALSE(begin > begin);
    ASSERT_TRUE(begin + 1 > begin);
    ASSERT_TRUE(begin + 1 >= begin);
    ASSERT_FALSE(begin >= begin + 1);
    ASSERT_TRUE(begin <= begin + 1);
    ASSERT_TRUE(end > mid);
    ASSERT_TRUE(mid > begin);
    ASSERT_TRUE(std::is_sorted(begin, mid));
    ASSERT_TRUE(std::is_sorted(mid, end));
  }
}

// Verifies constructor/destructor pairing for queue elements: on capacity
// growth, on erase_front, on queue destruction, and when elements are
// swapped across two queues.
TEST(CircularQueueTest, ObjectLifetime) {
  class Checker {
   public:
    struct Stats {
      int num_ctors = 0;   // (Stats*, int) constructions only.
      int num_dtors = 0;   // All destructions, including moved-from shells.
      int num_alive = 0;   // Constructions minus non-moved-from destructions.
    };
    Checker(Stats* stats, int n) : stats_(stats), n_(n) {
      EXPECT_GE(n, 0);
      stats_->num_ctors++;
      stats_->num_alive++;
      ptr_ = reinterpret_cast<uintptr_t>(this);
    }

    ~Checker() {
      // ptr_ detects destruction at an address different from where the
      // object was (move-)constructed, i.e. a relocation that bypassed the
      // move constructor.
      EXPECT_EQ(ptr_, reinterpret_cast<uintptr_t>(this));
      // Moved-from instances (n_ == -1) no longer count as alive.
      if (n_ >= 0)
        stats_->num_alive--;
      n_ = -1;
      stats_->num_dtors++;
    }

    Checker(Checker&& other) noexcept {
      n_ = other.n_;
      other.n_ = -1;  // Mark the source as moved-from.
      stats_ = other.stats_;
      ptr_ = reinterpret_cast<uintptr_t>(this);
    }

    // Reconstructs in place without running the destructor first, so plain
    // assignments (e.g. inside std::swap) don't perturb the dtor counters.
    Checker& operator=(Checker&& other) {
      new (this) Checker(std::move(other));
      return *this;
    }

    Checker(const Checker&) = delete;
    Checker& operator=(const Checker&) = delete;

    Stats* stats_ = nullptr;
    uintptr_t ptr_ = 0;
    int n_ = -1;
  };

  // Check that dtors are invoked on old elements when growing capacity.
  Checker::Stats stats;
  {
    CircularQueue<Checker> queue(/*initial_capacity=*/2);
    for (int i = 0; i < 2; i++)
      queue.emplace_back(&stats, i);
    ASSERT_EQ(stats.num_ctors, 2);
    ASSERT_EQ(stats.num_dtors, 0);

    // This further insertion will grow the queue, causing two std::move()s
    // for the previous elements and one emplace.
    queue.emplace_back(&stats, 2);
    ASSERT_EQ(stats.num_ctors, 3);

    // The two old elements that have std::move()d should be destroyed upon
    // expansion.
    ASSERT_EQ(stats.num_dtors, 2);
  }
  // Destroying the queue destroys the three elements still stored in it.
  ASSERT_EQ(stats.num_dtors, 2 + 3);

  stats = Checker::Stats();
  {
    CircularQueue<Checker> queue(/*initial_capacity=*/1);
    for (int i = 0; i < 5; i++)
      queue.emplace_back(&stats, i);
    ASSERT_EQ(stats.num_ctors, 5);
    Checker c5(&stats, 5);
    queue.emplace_back(std::move(c5));
    ASSERT_EQ(stats.num_alive, 5 + 1);

    queue.erase_front(2);
    ASSERT_EQ(stats.num_alive, 5 + 1 - 2);

    for (int i = 0; i < 4; i++)
      queue.emplace_back(&stats, 10 + i);
    ASSERT_EQ(stats.num_alive, 5 + 1 - 2 + 4);
  }
  ASSERT_EQ(stats.num_ctors, 5 + 1 + 4);
  ASSERT_EQ(stats.num_alive, 0);

  stats = Checker::Stats();
  {
    CircularQueue<Checker> q1(1);
    CircularQueue<Checker> q2(64);
    for (int i = 0; i < 100; i++) {
      q1.emplace_back(&stats, 1000 + i * 2);    // Even values.
      q2.emplace_back(&stats, 1001 + i * 2);    // Odd values.
    }

    ASSERT_EQ(stats.num_alive, 200);

    // Swap even-indexed elements across the two queues; both sequences stay
    // sorted and the number of live elements must be unchanged.
    for (int i = 0; i < 100; i += 2) {
      auto it1 = q1.begin() + i;
      auto it2 = q2.begin() + i;
      std::swap(*it1, *it2);
    }
    auto comparer = [](const Checker& lhs, const Checker& rhs) {
      return lhs.n_ < rhs.n_;
    };
    ASSERT_TRUE(std::is_sorted(q1.begin(), q1.end(), comparer));
    ASSERT_TRUE(std::is_sorted(q2.begin(), q2.end(), comparer));
    ASSERT_EQ(stats.num_alive, 200);
  }
}

}  // namespace
}  // namespace base
}  // namespace perfetto