1 // Copyright 2019 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "absl/container/inlined_vector.h"
16
17 #include <algorithm>
18 #include <forward_list>
19 #include <iterator>
20 #include <list>
21 #include <memory>
22 #include <scoped_allocator>
23 #include <sstream>
24 #include <stdexcept>
25 #include <string>
26 #include <utility>
27 #include <vector>
28
29 #include "gmock/gmock.h"
30 #include "gtest/gtest.h"
31 #include "absl/base/attributes.h"
32 #include "absl/base/internal/exception_testing.h"
33 #include "absl/base/internal/raw_logging.h"
34 #include "absl/base/macros.h"
35 #include "absl/base/options.h"
36 #include "absl/container/internal/counting_allocator.h"
37 #include "absl/container/internal/test_instance_tracker.h"
38 #include "absl/hash/hash_testing.h"
39 #include "absl/memory/memory.h"
40 #include "absl/strings/str_cat.h"
41
42 namespace {
43
44 using absl::container_internal::CountingAllocator;
45 using absl::test_internal::CopyableMovableInstance;
46 using absl::test_internal::CopyableOnlyInstance;
47 using absl::test_internal::InstanceTracker;
48 using testing::AllOf;
49 using testing::Each;
50 using testing::ElementsAre;
51 using testing::ElementsAreArray;
52 using testing::Eq;
53 using testing::Gt;
54 using testing::Pointwise;
55 using testing::PrintToString;
56
57 using IntVec = absl::InlinedVector<int, 8>;
58
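// gMock matchers below forward a matcher to the container's size() and
// capacity() and to a test instance's value() accessor.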
59 MATCHER_P(SizeIs, n, "") {
60 return testing::ExplainMatchResult(n, arg.size(), result_listener);
61 }
62
63 MATCHER_P(CapacityIs, n, "") {
64 return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
65 }
66
67 MATCHER_P(ValueIs, e, "") {
68 return testing::ExplainMatchResult(e, arg.value(), result_listener);
69 }
70
71 // TODO(bsamwel): Add support for movable-only types.
72
73 // Test fixture for typed tests on BaseCountedInstance derived classes, see
74 // test_instance_tracker.h.
75 template <typename T>
76 class InstanceTest : public ::testing::Test {};
77 TYPED_TEST_SUITE_P(InstanceTest);
78
79 // A simple reference counted class to make sure that the proper elements are
80 // destroyed in the erase(begin, end) test.
81 class RefCounted {
82 public:
83 RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }
84
85 RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
86 Ref();
87 }
88
89 ~RefCounted() {
90 Unref();
91 count_ = nullptr;
92 }
93
94 friend void swap(RefCounted& a, RefCounted& b) {
95 using std::swap;
96 swap(a.value_, b.value_);
97 swap(a.count_, b.count_);
98 }
99
100 RefCounted& operator=(RefCounted v) {
101 using std::swap;
102 swap(*this, v);
103 return *this;
104 }
105
106 void Ref() const {
107 ABSL_RAW_CHECK(count_ != nullptr, "");
108 ++(*count_);
109 }
110
111 void Unref() const {
112 --(*count_);
113 ABSL_RAW_CHECK(*count_ >= 0, "");
114 }
115
116 int value_;
117 int* count_;
118 };
119
120 using RefCountedVec = absl::InlinedVector<RefCounted, 8>;
121
122 // A class with a vtable pointer
123 class Dynamic {
124 public:
125 virtual ~Dynamic() {}
126 };
127
128 using DynamicVec = absl::InlinedVector<Dynamic, 8>;
129
130 // Append 0..len-1 to *v
131 template <typename Container>
132 static void Fill(Container* v, size_t len, int offset = 0) {
133 for (size_t i = 0; i < len; i++) {
134 v->push_back(static_cast<int>(i) + offset);
135 }
136 }
137
138 static IntVec Fill(size_t len, int offset = 0) {
139 IntVec v;
140 Fill(&v, len, offset);
141 return v;
142 }
143
144 TEST(IntVec, SimpleOps) {
145 for (size_t len = 0; len < 20; len++) {
146 IntVec v;
147 const IntVec& cv = v; // const alias
148
149 Fill(&v, len);
150 EXPECT_EQ(len, v.size());
151 EXPECT_LE(len, v.capacity());
152
153 for (size_t i = 0; i < len; i++) {
154 EXPECT_EQ(static_cast<int>(i), v[i]);
155 EXPECT_EQ(static_cast<int>(i), v.at(i));
156 }
157 EXPECT_EQ(v.begin(), v.data());
158 EXPECT_EQ(cv.begin(), cv.data());
159
160 size_t counter = 0;
161 for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) {
162 EXPECT_EQ(static_cast<int>(counter), *iter);
163 counter++;
164 }
165 EXPECT_EQ(counter, len);
166
167 counter = 0;
168 for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) {
169 EXPECT_EQ(static_cast<int>(counter), *iter);
170 counter++;
171 }
172 EXPECT_EQ(counter, len);
173
174 counter = 0;
175 for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) {
176 EXPECT_EQ(static_cast<int>(counter), *iter);
177 counter++;
178 }
179 EXPECT_EQ(counter, len);
180
181 if (len > 0) {
182 EXPECT_EQ(0, v.front());
183 EXPECT_EQ(static_cast<int>(len - 1), v.back());
184 v.pop_back();
185 EXPECT_EQ(len - 1, v.size());
186 for (size_t i = 0; i < v.size(); ++i) {
187 EXPECT_EQ(static_cast<int>(i), v[i]);
188 EXPECT_EQ(static_cast<int>(i), v.at(i));
189 }
190 }
191 }
192 }
193
194 TEST(IntVec, PopBackNoOverflow) {
195 IntVec v = {1};
196 v.pop_back();
197 EXPECT_EQ(v.size(), 0u);
198 }
199
200 TEST(IntVec, AtThrows) {
201 IntVec v = {1, 2, 3};
202 EXPECT_EQ(v.at(2), 3);
203 ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
204 "failed bounds check");
205 }
206
207 TEST(IntVec, ReverseIterator) {
208 for (size_t len = 0; len < 20; len++) {
209 IntVec v;
210 Fill(&v, len);
211
212 size_t counter = len;
213 for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend(); ++iter) {
214 counter--;
215 EXPECT_EQ(static_cast<int>(counter), *iter);
216 }
217 EXPECT_EQ(counter, 0u);
218
219 counter = len;
220 for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
221 ++iter) {
222 counter--;
223 EXPECT_EQ(static_cast<int>(counter), *iter);
224 }
225 EXPECT_EQ(counter, 0u);
226
227 counter = len;
228 for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
229 ++iter) {
230 counter--;
231 EXPECT_EQ(static_cast<int>(counter), *iter);
232 }
233 EXPECT_EQ(counter, 0u);
234 }
235 }
236
237 TEST(IntVec, Erase) {
238 for (size_t len = 1; len < 20; len++) {
239 for (size_t i = 0; i < len; ++i) {
240 IntVec v;
241 Fill(&v, len);
242 v.erase(v.begin() + i);
243 EXPECT_EQ(len - 1, v.size());
244 for (size_t j = 0; j < i; ++j) {
245 EXPECT_EQ(static_cast<int>(j), v[j]);
246 }
247 for (size_t j = i; j < len - 1; ++j) {
248 EXPECT_EQ(static_cast<int>(j + 1), v[j]);
249 }
250 }
251 }
252 }
253
254 TEST(IntVec, Hardened) {
255 IntVec v;
256 Fill(&v, 10);
257 EXPECT_EQ(v[9], 9);
258 #if !defined(NDEBUG) || ABSL_OPTION_HARDENED
259 EXPECT_DEATH_IF_SUPPORTED(v[10], "");
260 EXPECT_DEATH_IF_SUPPORTED(v[static_cast<size_t>(-1)], "");
261 EXPECT_DEATH_IF_SUPPORTED(v.resize(v.max_size() + 1), "");
262 #endif
263 }
264
265 // At the end of this test loop, the elements between [erase_begin, erase_end)
266 // should have reference counts == 0, and all other elements should have
267 // reference counts == 1.
268 TEST(RefCountedVec, EraseBeginEnd) {
269 for (size_t len = 1; len < 20; ++len) {
270 for (size_t erase_begin = 0; erase_begin < len; ++erase_begin) {
271 for (size_t erase_end = erase_begin; erase_end <= len; ++erase_end) {
272 std::vector<int> counts(len, 0);
273 RefCountedVec v;
274 for (size_t i = 0; i < len; ++i) {
275 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
276 }
277
278 size_t erase_len = erase_end - erase_begin;
279
280 v.erase(v.begin() + erase_begin, v.begin() + erase_end);
281
282 EXPECT_EQ(len - erase_len, v.size());
283
284 // Check the elements before the first element erased.
285 for (size_t i = 0; i < erase_begin; ++i) {
286 EXPECT_EQ(static_cast<int>(i), v[i].value_);
287 }
288
289 // Check the elements after the first element erased.
290 for (size_t i = erase_begin; i < v.size(); ++i) {
291 EXPECT_EQ(static_cast<int>(i + erase_len), v[i].value_);
292 }
293
294 // Check that the elements at the beginning are preserved.
295 for (size_t i = 0; i < erase_begin; ++i) {
296 EXPECT_EQ(1, counts[i]);
297 }
298
299 // Check that the erased elements are destroyed
300 for (size_t i = erase_begin; i < erase_end; ++i) {
301 EXPECT_EQ(0, counts[i]);
302 }
303
304 // Check that the elements at the end are preserved.
305 for (size_t i = erase_end; i < len; ++i) {
306 EXPECT_EQ(1, counts[i]);
307 }
308 }
309 }
310 }
311 }
312
313 struct NoDefaultCtor {
314 explicit NoDefaultCtor(int) {}
315 };
316 struct NoCopy {
317 NoCopy() {}
318 NoCopy(const NoCopy&) = delete;
319 };
320 struct NoAssign {
321 NoAssign() {}
322 NoAssign& operator=(const NoAssign&) = delete;
323 };
324 struct MoveOnly {
325 MoveOnly() {}
326 MoveOnly(MoveOnly&&) = default;
327 MoveOnly& operator=(MoveOnly&&) = default;
328 };
329 TEST(InlinedVectorTest, NoDefaultCtor) {
330 absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2));
331 (void)v;
332 }
333 TEST(InlinedVectorTest, NoCopy) {
334 absl::InlinedVector<NoCopy, 1> v(10);
335 (void)v;
336 }
337 TEST(InlinedVectorTest, NoAssign) {
338 absl::InlinedVector<NoAssign, 1> v(10);
339 (void)v;
340 }
341 TEST(InlinedVectorTest, MoveOnly) {
342 absl::InlinedVector<MoveOnly, 2> v;
343 v.push_back(MoveOnly{});
344 v.push_back(MoveOnly{});
345 v.push_back(MoveOnly{});
346 v.erase(v.begin());
347 v.push_back(MoveOnly{});
348 v.erase(v.begin(), v.begin() + 1);
349 v.insert(v.begin(), MoveOnly{});
350 v.emplace(v.begin());
351 v.emplace(v.begin(), MoveOnly{});
352 }
353 TEST(InlinedVectorTest, Noexcept) {
354 EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value);
355 EXPECT_TRUE((std::is_nothrow_move_constructible<
356 absl::InlinedVector<MoveOnly, 2>>::value));
357
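// MoveCanThrow's move constructor may throw, so the vector's move
// constructor is expected to be noexcept only when default allocation
// itself cannot throw.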
358 struct MoveCanThrow {
359 MoveCanThrow(MoveCanThrow&&) {}
360 };
361 EXPECT_EQ(absl::default_allocator_is_nothrow::value,
362 (std::is_nothrow_move_constructible<
363 absl::InlinedVector<MoveCanThrow, 2>>::value));
364 }
365
366 TEST(InlinedVectorTest, EmplaceBack) {
367 absl::InlinedVector<std::pair<std::string, int>, 1> v;
368
369 auto& inlined_element = v.emplace_back("answer", 42);
370 EXPECT_EQ(&inlined_element, &v[0]);
371 EXPECT_EQ(inlined_element.first, "answer");
372 EXPECT_EQ(inlined_element.second, 42);
373
374 auto& allocated_element = v.emplace_back("taxicab", 1729);
375 EXPECT_EQ(&allocated_element, &v[1]);
376 EXPECT_EQ(allocated_element.first, "taxicab");
377 EXPECT_EQ(allocated_element.second, 1729);
378 }
379
380 TEST(InlinedVectorTest, ShrinkToFitGrowingVector) {
381 absl::InlinedVector<std::pair<std::string, int>, 1> v;
382
383 v.shrink_to_fit();
384 EXPECT_EQ(v.capacity(), 1u);
385
386 v.emplace_back("answer", 42);
387 v.shrink_to_fit();
388 EXPECT_EQ(v.capacity(), 1u);
389
390 v.emplace_back("taxicab", 1729);
391 EXPECT_GE(v.capacity(), 2u);
392 v.shrink_to_fit();
393 EXPECT_EQ(v.capacity(), 2u);
394
395 v.reserve(100);
396 EXPECT_GE(v.capacity(), 100u);
397 v.shrink_to_fit();
398 EXPECT_EQ(v.capacity(), 2u);
399 }
400
401 TEST(InlinedVectorTest, ShrinkToFitEdgeCases) {
402 {
403 absl::InlinedVector<std::pair<std::string, int>, 1> v;
404 v.emplace_back("answer", 42);
405 v.emplace_back("taxicab", 1729);
406 EXPECT_GE(v.capacity(), 2u);
407 v.pop_back();
408 v.shrink_to_fit();
409 EXPECT_EQ(v.capacity(), 1u);
410 EXPECT_EQ(v[0].first, "answer");
411 EXPECT_EQ(v[0].second, 42);
412 }
413
414 {
415 absl::InlinedVector<std::string, 2> v(100);
416 v.resize(0);
417 v.shrink_to_fit();
418 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
419 }
420
421 {
422 absl::InlinedVector<std::string, 2> v(100);
423 v.resize(1);
424 v.shrink_to_fit();
425 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
426 }
427
428 {
429 absl::InlinedVector<std::string, 2> v(100);
430 v.resize(2);
431 v.shrink_to_fit();
432 EXPECT_EQ(v.capacity(), 2u);
433 }
434
435 {
436 absl::InlinedVector<std::string, 2> v(100);
437 v.resize(3);
438 v.shrink_to_fit();
439 EXPECT_EQ(v.capacity(), 3u);
440 }
441 }
442
443 TEST(IntVec, Insert) {
444 for (size_t len = 0; len < 20; len++) {
445 for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) {
446 {
447 // Single element
448 std::vector<int> std_v;
449 Fill(&std_v, len);
450 IntVec v;
451 Fill(&v, len);
452
453 std_v.insert(std_v.begin() + pos, 9999);
454 IntVec::iterator it = v.insert(v.cbegin() + pos, 9999);
455 EXPECT_THAT(v, ElementsAreArray(std_v));
456 EXPECT_EQ(it, v.cbegin() + pos);
457 }
458 {
459 // n elements
460 std::vector<int> std_v;
461 Fill(&std_v, len);
462 IntVec v;
463 Fill(&v, len);
464
465 IntVec::size_type n = 5;
466 std_v.insert(std_v.begin() + pos, n, 9999);
467 IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999);
468 EXPECT_THAT(v, ElementsAreArray(std_v));
469 EXPECT_EQ(it, v.cbegin() + pos);
470 }
471 {
472 // Iterator range (random access iterator)
473 std::vector<int> std_v;
474 Fill(&std_v, len);
475 IntVec v;
476 Fill(&v, len);
477
478 const std::vector<int> input = {9999, 8888, 7777};
479 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
480 IntVec::iterator it =
481 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
482 EXPECT_THAT(v, ElementsAreArray(std_v));
483 EXPECT_EQ(it, v.cbegin() + pos);
484 }
485 {
486 // Iterator range (forward iterator)
487 std::vector<int> std_v;
488 Fill(&std_v, len);
489 IntVec v;
490 Fill(&v, len);
491
492 const std::forward_list<int> input = {9999, 8888, 7777};
493 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
494 IntVec::iterator it =
495 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
496 EXPECT_THAT(v, ElementsAreArray(std_v));
497 EXPECT_EQ(it, v.cbegin() + pos);
498 }
499 {
500 // Iterator range (input iterator)
501 std::vector<int> std_v;
502 Fill(&std_v, len);
503 IntVec v;
504 Fill(&v, len);
505
506 std_v.insert(std_v.begin() + pos, {9999, 8888, 7777});
507 std::istringstream input("9999 8888 7777");
508 IntVec::iterator it =
509 v.insert(v.cbegin() + pos, std::istream_iterator<int>(input),
510 std::istream_iterator<int>());
511 EXPECT_THAT(v, ElementsAreArray(std_v));
512 EXPECT_EQ(it, v.cbegin() + pos);
513 }
514 {
515 // Initializer list
516 std::vector<int> std_v;
517 Fill(&std_v, len);
518 IntVec v;
519 Fill(&v, len);
520
521 std_v.insert(std_v.begin() + pos, {9999, 8888});
522 IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888});
523 EXPECT_THAT(v, ElementsAreArray(std_v));
524 EXPECT_EQ(it, v.cbegin() + pos);
525 }
526 }
527 }
528 }
529
530 TEST(RefCountedVec, InsertConstructorDestructor) {
531 // Make sure the proper construction/destruction happen during insert
532 // operations.
533 for (size_t len = 0; len < 20; len++) {
534 SCOPED_TRACE(len);
535 for (size_t pos = 0; pos <= len; pos++) {
536 SCOPED_TRACE(pos);
537 std::vector<int> counts(len, 0);
538 int inserted_count = 0;
539 RefCountedVec v;
540 for (size_t i = 0; i < len; ++i) {
541 SCOPED_TRACE(i);
542 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
543 }
544
545 EXPECT_THAT(counts, Each(Eq(1)));
546
547 RefCounted insert_element(9999, &inserted_count);
548 EXPECT_EQ(1, inserted_count);
549 v.insert(v.begin() + pos, insert_element);
550 EXPECT_EQ(2, inserted_count);
551 // Check that the elements at the end are preserved.
552 EXPECT_THAT(counts, Each(Eq(1)));
553 EXPECT_EQ(2, inserted_count);
554 }
555 }
556 }
557
558 TEST(IntVec, Resize) {
559 for (size_t len = 0; len < 20; len++) {
560 IntVec v;
561 Fill(&v, len);
562
563 // Try resizing up and down by k elements
564 static const int kResizeElem = 1000000;
565 for (size_t k = 0; k < 10; k++) {
566 // Enlarging resize
567 v.resize(len + k, kResizeElem);
568 EXPECT_EQ(len + k, v.size());
569 EXPECT_LE(len + k, v.capacity());
570 for (size_t i = 0; i < len + k; i++) {
571 if (i < len) {
572 EXPECT_EQ(static_cast<int>(i), v[i]);
573 } else {
574 EXPECT_EQ(kResizeElem, v[i]);
575 }
576 }
577
578 // Shrinking resize
579 v.resize(len, kResizeElem);
580 EXPECT_EQ(len, v.size());
581 EXPECT_LE(len, v.capacity());
582 for (size_t i = 0; i < len; i++) {
583 EXPECT_EQ(static_cast<int>(i), v[i]);
584 }
585 }
586 }
587 }
588
589 TEST(IntVec, InitWithLength) {
590 for (size_t len = 0; len < 20; len++) {
591 IntVec v(len, 7);
592 EXPECT_EQ(len, v.size());
593 EXPECT_LE(len, v.capacity());
594 for (size_t i = 0; i < len; i++) {
595 EXPECT_EQ(7, v[i]);
596 }
597 }
598 }
599
600 TEST(IntVec, CopyConstructorAndAssignment) {
601 for (size_t len = 0; len < 20; len++) {
602 IntVec v;
603 Fill(&v, len);
604 EXPECT_EQ(len, v.size());
605 EXPECT_LE(len, v.capacity());
606
607 IntVec v2(v);
608 EXPECT_TRUE(v == v2) << PrintToString(v) << PrintToString(v2);
609
610 for (size_t start_len = 0; start_len < 20; start_len++) {
611 IntVec v3;
612 Fill(&v3, start_len, 99); // Add dummy elements that should go away
613 v3 = v;
614 EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3);
615 }
616 }
617 }
618
619 TEST(IntVec, AliasingCopyAssignment) {
620 for (size_t len = 0; len < 20; ++len) {
621 IntVec original;
622 Fill(&original, len);
623 IntVec dup = original;
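// Copy-assign through an alias to exercise self-assignment; the contents
// must be unchanged.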
624 dup = *&dup;
625 EXPECT_EQ(dup, original);
626 }
627 }
628
629 TEST(IntVec, MoveConstructorAndAssignment) {
630 for (size_t len = 0; len < 20; len++) {
631 IntVec v_in;
632 const size_t inlined_capacity = v_in.capacity();
633 Fill(&v_in, len);
634 EXPECT_EQ(len, v_in.size());
635 EXPECT_LE(len, v_in.capacity());
636
637 {
638 IntVec v_temp(v_in);
639 auto* old_data = v_temp.data();
640 IntVec v_out(std::move(v_temp));
641 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
642 if (v_in.size() > inlined_capacity) {
643 // Allocation is moved as a whole, data stays in place.
644 EXPECT_TRUE(v_out.data() == old_data);
645 } else {
646 EXPECT_FALSE(v_out.data() == old_data);
647 }
648 }
649 for (size_t start_len = 0; start_len < 20; start_len++) {
650 IntVec v_out;
651 Fill(&v_out, start_len, 99); // Add dummy elements that should go away
652 IntVec v_temp(v_in);
653 auto* old_data = v_temp.data();
654 v_out = std::move(v_temp);
655 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
656 if (v_in.size() > inlined_capacity) {
657 // Allocation is moved as a whole, data stays in place.
658 EXPECT_TRUE(v_out.data() == old_data);
659 } else {
660 EXPECT_FALSE(v_out.data() == old_data);
661 }
662 }
663 }
664 }
665
666 class NotTriviallyDestructible {
667 public:
668 NotTriviallyDestructible() : p_(new int(1)) {}
669 explicit NotTriviallyDestructible(int i) : p_(new int(i)) {}
670
671 NotTriviallyDestructible(const NotTriviallyDestructible& other)
672 : p_(new int(*other.p_)) {}
673
674 NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) {
675 p_ = absl::make_unique<int>(*other.p_);
676 return *this;
677 }
678
679 bool operator==(const NotTriviallyDestructible& other) const {
680 return *p_ == *other.p_;
681 }
682
683 private:
684 std::unique_ptr<int> p_;
685 };
686
687 TEST(AliasingTest, Emplace) {
688 for (size_t i = 2; i < 20; ++i) {
689 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
690 for (size_t j = 0; j < i; ++j) {
691 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
692 }
693 vec.emplace(vec.begin(), vec[0]);
694 EXPECT_EQ(vec[0], vec[1]);
695 vec.emplace(vec.begin() + i / 2, vec[i / 2]);
696 EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]);
697 vec.emplace(vec.end() - 1, vec.back());
698 EXPECT_EQ(vec[vec.size() - 2], vec.back());
699 }
700 }
701
702 TEST(AliasingTest, InsertWithCount) {
703 for (size_t i = 1; i < 20; ++i) {
704 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
705 for (size_t j = 0; j < i; ++j) {
706 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
707 }
708 for (size_t n = 0; n < 5; ++n) {
709 // We use back where we can because it's guaranteed to become invalidated
710 vec.insert(vec.begin(), n, vec.back());
711 auto b = vec.begin();
712 EXPECT_TRUE(
713 std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) {
714 return x == vec.back();
715 }));
716
717 auto m_idx = vec.size() / 2;
718 vec.insert(vec.begin() + m_idx, n, vec.back());
719 auto m = vec.begin() + m_idx;
720 EXPECT_TRUE(
721 std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) {
722 return x == vec.back();
723 }));
724
725 // We want distinct values so the equality test is meaningful,
726 // vec[vec.size() - 1] is also almost always invalidated.
727 auto old_e = vec.size() - 1;
728 auto val = vec[old_e];
729 vec.insert(vec.end(), n, vec[old_e]);
730 auto e = vec.begin() + old_e;
731 EXPECT_TRUE(std::all_of(
732 e, e + n,
733 [&val](const NotTriviallyDestructible& x) { return x == val; }));
734 }
735 }
736 }
737
738 TEST(OverheadTest, Storage) {
739 // Check for size overhead.
740 // In particular, ensure that std::allocator doesn't cost anything to store.
741 // The union should be absorbing some of the allocation bookkeeping overhead
742 // in the larger vectors, leaving only the size_ field as overhead.
743
744 struct T { void* val; };
745 size_t expected_overhead = sizeof(T);
746
747 EXPECT_EQ((2 * expected_overhead),
748 sizeof(absl::InlinedVector<T, 1>) - sizeof(T[1]));
749 EXPECT_EQ(expected_overhead,
750 sizeof(absl::InlinedVector<T, 2>) - sizeof(T[2]));
751 EXPECT_EQ(expected_overhead,
752 sizeof(absl::InlinedVector<T, 3>) - sizeof(T[3]));
753 EXPECT_EQ(expected_overhead,
754 sizeof(absl::InlinedVector<T, 4>) - sizeof(T[4]));
755 EXPECT_EQ(expected_overhead,
756 sizeof(absl::InlinedVector<T, 5>) - sizeof(T[5]));
757 EXPECT_EQ(expected_overhead,
758 sizeof(absl::InlinedVector<T, 6>) - sizeof(T[6]));
759 EXPECT_EQ(expected_overhead,
760 sizeof(absl::InlinedVector<T, 7>) - sizeof(T[7]));
761 EXPECT_EQ(expected_overhead,
762 sizeof(absl::InlinedVector<T, 8>) - sizeof(T[8]));
763 }
764
765 TEST(IntVec, Clear) {
766 for (size_t len = 0; len < 20; len++) {
767 SCOPED_TRACE(len);
768 IntVec v;
769 Fill(&v, len);
770 v.clear();
771 EXPECT_EQ(0u, v.size());
772 EXPECT_EQ(v.begin(), v.end());
773 }
774 }
775
776 TEST(IntVec, Reserve) {
777 for (size_t len = 0; len < 20; len++) {
778 IntVec v;
779 Fill(&v, len);
780
781 for (size_t newlen = 0; newlen < 100; newlen++) {
782 const int* start_rep = v.data();
783 v.reserve(newlen);
784 const int* final_rep = v.data();
785 if (newlen <= len) {
786 EXPECT_EQ(start_rep, final_rep);
787 }
788 EXPECT_LE(newlen, v.capacity());
789
790 // Filling up to newlen should not change rep
791 while (v.size() < newlen) {
792 v.push_back(0);
793 }
794 EXPECT_EQ(final_rep, v.data());
795 }
796 }
797 }
798
799 TEST(StringVec, SelfRefPushBack) {
800 std::vector<std::string> std_v;
801 absl::InlinedVector<std::string, 4> v;
802 const std::string s = "A quite long string to ensure heap.";
803 std_v.push_back(s);
804 v.push_back(s);
805 for (int i = 0; i < 20; ++i) {
806 EXPECT_THAT(v, ElementsAreArray(std_v));
807
808 v.push_back(v.back());
809 std_v.push_back(std_v.back());
810 }
811 EXPECT_THAT(v, ElementsAreArray(std_v));
812 }
813
814 TEST(StringVec, SelfRefPushBackWithMove) {
815 std::vector<std::string> std_v;
816 absl::InlinedVector<std::string, 4> v;
817 const std::string s = "A quite long string to ensure heap.";
818 std_v.push_back(s);
819 v.push_back(s);
820 for (int i = 0; i < 20; ++i) {
821 EXPECT_EQ(v.back(), std_v.back());
822
823 v.push_back(std::move(v.back()));
824 std_v.push_back(std::move(std_v.back()));
825 }
826 EXPECT_EQ(v.back(), std_v.back());
827 }
828
829 TEST(StringVec, SelfMove) {
830 const std::string s = "A quite long string to ensure heap.";
831 for (int len = 0; len < 20; len++) {
832 SCOPED_TRACE(len);
833 absl::InlinedVector<std::string, 8> v;
834 for (int i = 0; i < len; ++i) {
835 SCOPED_TRACE(i);
836 v.push_back(s);
837 }
838 // Indirection necessary to avoid compiler warning.
839 v = std::move(*(&v));
840 // Ensure that the inlined vector is still in a valid state by copying it.
841 // We don't expect specific contents since a self-move results in an
842 // unspecified valid state.
843 std::vector<std::string> copy(v.begin(), v.end());
844 }
845 }
846
847 TEST(IntVec, Swap) {
848 for (size_t l1 = 0; l1 < 20; l1++) {
849 SCOPED_TRACE(l1);
850 for (size_t l2 = 0; l2 < 20; l2++) {
851 SCOPED_TRACE(l2);
852 IntVec a = Fill(l1, 0);
853 IntVec b = Fill(l2, 100);
854 {
855 using std::swap;
856 swap(a, b);
857 }
858 EXPECT_EQ(l1, b.size());
859 EXPECT_EQ(l2, a.size());
860 for (size_t i = 0; i < l1; i++) {
861 SCOPED_TRACE(i);
862 EXPECT_EQ(static_cast<int>(i), b[i]);
863 }
864 for (size_t i = 0; i < l2; i++) {
865 SCOPED_TRACE(i);
866 EXPECT_EQ(100 + static_cast<int>(i), a[i]);
867 }
868 }
869 }
870 }
871
872 TYPED_TEST_P(InstanceTest, Swap) {
873 using Instance = TypeParam;
874 using InstanceVec = absl::InlinedVector<Instance, 8>;
875 for (size_t l1 = 0; l1 < 20; l1++) {
876 SCOPED_TRACE(l1);
877 for (size_t l2 = 0; l2 < 20; l2++) {
878 SCOPED_TRACE(l2);
879 InstanceTracker tracker;
880 InstanceVec a, b;
881 const size_t inlined_capacity = a.capacity();
882 auto min_len = std::min(l1, l2);
883 auto max_len = std::max(l1, l2);
884 for (size_t i = 0; i < l1; i++)
885 a.push_back(Instance(static_cast<int>(i)));
886 for (size_t i = 0; i < l2; i++)
887 b.push_back(Instance(100 + static_cast<int>(i)));
888 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
889 tracker.ResetCopiesMovesSwaps();
890 {
891 using std::swap;
892 swap(a, b);
893 }
894 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
895 if (a.size() > inlined_capacity && b.size() > inlined_capacity) {
896 EXPECT_EQ(tracker.swaps(), 0); // Allocations are swapped.
897 EXPECT_EQ(tracker.moves(), 0);
898 } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) {
899 EXPECT_EQ(tracker.swaps(), static_cast<int>(min_len));
900 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
901 static_cast<int>(max_len - min_len));
902 } else {
903 // One is allocated and the other isn't. The allocation is transferred
904 // without copying elements, and the inlined instances are copied/moved.
905 EXPECT_EQ(tracker.swaps(), 0);
906 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
907 static_cast<int>(min_len));
908 }
909
910 EXPECT_EQ(l1, b.size());
911 EXPECT_EQ(l2, a.size());
912 for (size_t i = 0; i < l1; i++) {
913 EXPECT_EQ(static_cast<int>(i), b[i].value());
914 }
915 for (size_t i = 0; i < l2; i++) {
916 EXPECT_EQ(100 + static_cast<int>(i), a[i].value());
917 }
918 }
919 }
920 }
921
922 TEST(IntVec, EqualAndNotEqual) {
923 IntVec a, b;
924 EXPECT_TRUE(a == b);
925 EXPECT_FALSE(a != b);
926
927 a.push_back(3);
928 EXPECT_FALSE(a == b);
929 EXPECT_TRUE(a != b);
930
931 b.push_back(3);
932 EXPECT_TRUE(a == b);
933 EXPECT_FALSE(a != b);
934
935 b.push_back(7);
936 EXPECT_FALSE(a == b);
937 EXPECT_TRUE(a != b);
938
939 a.push_back(6);
940 EXPECT_FALSE(a == b);
941 EXPECT_TRUE(a != b);
942
943 a.clear();
944 b.clear();
945 for (size_t i = 0; i < 100; i++) {
946 a.push_back(static_cast<int>(i));
947 b.push_back(static_cast<int>(i));
948 EXPECT_TRUE(a == b);
949 EXPECT_FALSE(a != b);
950
951 b[i] = b[i] + 1;
952 EXPECT_FALSE(a == b);
953 EXPECT_TRUE(a != b);
954
955 b[i] = b[i] - 1; // Back to before
956 EXPECT_TRUE(a == b);
957 EXPECT_FALSE(a != b);
958 }
959 }
960
961 TEST(IntVec, RelationalOps) {
962 IntVec a, b;
963 EXPECT_FALSE(a < b);
964 EXPECT_FALSE(b < a);
965 EXPECT_FALSE(a > b);
966 EXPECT_FALSE(b > a);
967 EXPECT_TRUE(a <= b);
968 EXPECT_TRUE(b <= a);
969 EXPECT_TRUE(a >= b);
970 EXPECT_TRUE(b >= a);
971 b.push_back(3);
972 EXPECT_TRUE(a < b);
973 EXPECT_FALSE(b < a);
974 EXPECT_FALSE(a > b);
975 EXPECT_TRUE(b > a);
976 EXPECT_TRUE(a <= b);
977 EXPECT_FALSE(b <= a);
978 EXPECT_FALSE(a >= b);
979 EXPECT_TRUE(b >= a);
980 }
981
982 TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) {
983 using Instance = TypeParam;
984 using InstanceVec = absl::InlinedVector<Instance, 8>;
985 InstanceTracker tracker;
986 for (size_t len = 0; len < 20; len++) {
987 SCOPED_TRACE(len);
988 tracker.ResetCopiesMovesSwaps();
989
990 InstanceVec v;
991 const size_t inlined_capacity = v.capacity();
992 for (size_t i = 0; i < len; i++) {
993 v.push_back(Instance(static_cast<int>(i)));
994 }
995 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
996 EXPECT_GE(tracker.copies() + tracker.moves(),
997 static_cast<int>(len)); // More due to reallocation.
998 tracker.ResetCopiesMovesSwaps();
999
1000 // Enlarging resize() must construct some objects
1001 tracker.ResetCopiesMovesSwaps();
1002 v.resize(len + 10, Instance(100));
1003 EXPECT_EQ(tracker.instances(), static_cast<int>(len) + 10);
1004 if (len <= inlined_capacity && len + 10 > inlined_capacity) {
1005 EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + static_cast<int>(len));
1006 } else {
1007 // Only specify a minimum number of copies + moves. We don't want to
1008 // depend on the reallocation policy here.
1009 EXPECT_GE(tracker.copies() + tracker.moves(),
1010 10); // More due to reallocation.
1011 }
1012
1013 // Shrinking resize() must destroy some objects
1014 tracker.ResetCopiesMovesSwaps();
1015 v.resize(len, Instance(100));
1016 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1017 EXPECT_EQ(tracker.copies(), 0);
1018 EXPECT_EQ(tracker.moves(), 0);
1019
1020 // reserve() must not increase the number of initialized objects
1021 SCOPED_TRACE("reserve");
1022 v.reserve(len + 1000);
1023 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1024 EXPECT_EQ(tracker.copies() + tracker.moves(), static_cast<int>(len));
1025
1026 // pop_back() and erase() must destroy one object
1027 if (len > 0) {
1028 tracker.ResetCopiesMovesSwaps();
1029 v.pop_back();
1030 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 1);
1031 EXPECT_EQ(tracker.copies(), 0);
1032 EXPECT_EQ(tracker.moves(), 0);
1033
1034 if (!v.empty()) {
1035 tracker.ResetCopiesMovesSwaps();
1036 v.erase(v.begin());
1037 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 2);
1038 EXPECT_EQ(tracker.copies() + tracker.moves(),
1039 static_cast<int>(len) - 2);
1040 }
1041 }
1042
1043 tracker.ResetCopiesMovesSwaps();
1044 int instances_before_empty_erase = tracker.instances();
1045 v.erase(v.begin(), v.begin());
1046 EXPECT_EQ(tracker.instances(), instances_before_empty_erase);
1047 EXPECT_EQ(tracker.copies() + tracker.moves(), 0);
1048 }
1049 }
1050
1051 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) {
1052 using Instance = TypeParam;
1053 using InstanceVec = absl::InlinedVector<Instance, 8>;
1054 InstanceTracker tracker;
1055 for (int len = 0; len < 20; len++) {
1056 SCOPED_TRACE(len);
1057 tracker.ResetCopiesMovesSwaps();
1058
1059 InstanceVec v;
1060 for (int i = 0; i < len; i++) {
1061 v.push_back(Instance(i));
1062 }
1063 EXPECT_EQ(tracker.instances(), len);
1064 EXPECT_GE(tracker.copies() + tracker.moves(),
1065 len); // More due to reallocation.
1066 tracker.ResetCopiesMovesSwaps();
1067 { // Copy constructor should create 'len' more instances.
1068 InstanceVec v_copy(v);
1069 EXPECT_EQ(tracker.instances(), len + len);
1070 EXPECT_EQ(tracker.copies(), len);
1071 EXPECT_EQ(tracker.moves(), 0);
1072 }
1073 EXPECT_EQ(tracker.instances(), len);
1074 }
1075 }
1076
1077 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) {
1078 using Instance = TypeParam;
1079 using InstanceVec = absl::InlinedVector<Instance, 8>;
1080 InstanceTracker tracker;
1081 for (int len = 0; len < 20; len++) {
1082 SCOPED_TRACE(len);
1083 tracker.ResetCopiesMovesSwaps();
1084
1085 InstanceVec v;
1086 const size_t inlined_capacity = v.capacity();
1087 for (int i = 0; i < len; i++) {
1088 v.push_back(Instance(i));
1089 }
1090 EXPECT_EQ(tracker.instances(), len);
1091 EXPECT_GE(tracker.copies() + tracker.moves(),
1092 len); // More due to reallocation.
1093 tracker.ResetCopiesMovesSwaps();
1094 {
1095 InstanceVec v_copy(std::move(v));
1096 if (static_cast<size_t>(len) > inlined_capacity) {
1097 // Allocation is moved as a whole.
1098 EXPECT_EQ(tracker.instances(), len);
1099 EXPECT_EQ(tracker.live_instances(), len);
1100 // Tests an implementation detail, don't rely on this in your code.
1101 EXPECT_EQ(v.size(), 0u); // NOLINT misc-use-after-move
1102 EXPECT_EQ(tracker.copies(), 0);
1103 EXPECT_EQ(tracker.moves(), 0);
1104 } else {
1105 EXPECT_EQ(tracker.instances(), len + len);
1106 if (Instance::supports_move()) {
1107 EXPECT_EQ(tracker.live_instances(), len);
1108 EXPECT_EQ(tracker.copies(), 0);
1109 EXPECT_EQ(tracker.moves(), len);
1110 } else {
1111 EXPECT_EQ(tracker.live_instances(), len + len);
1112 EXPECT_EQ(tracker.copies(), len);
1113 EXPECT_EQ(tracker.moves(), 0);
1114 }
1115 }
1116 EXPECT_EQ(tracker.swaps(), 0);
1117 }
1118 }
1119 }
1120
1121 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) {
1122 using Instance = TypeParam;
1123 using InstanceVec = absl::InlinedVector<Instance, 8>;
1124 InstanceTracker tracker;
1125 for (int len = 0; len < 20; len++) {
1126 SCOPED_TRACE(len);
1127 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1128 SCOPED_TRACE(longorshort);
1129 tracker.ResetCopiesMovesSwaps();
1130
1131 InstanceVec longer, shorter;
1132 for (int i = 0; i < len; i++) {
1133 longer.push_back(Instance(i));
1134 shorter.push_back(Instance(i));
1135 }
1136 longer.push_back(Instance(len));
1137 EXPECT_EQ(tracker.instances(), len + len + 1);
1138 EXPECT_GE(tracker.copies() + tracker.moves(),
1139 len + len + 1); // More due to reallocation.
1140
1141 tracker.ResetCopiesMovesSwaps();
1142 if (longorshort) {
1143 shorter = longer;
1144 EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1));
1145 EXPECT_GE(tracker.copies() + tracker.moves(),
1146 len + 1); // More due to reallocation.
1147 } else {
1148 longer = shorter;
1149 EXPECT_EQ(tracker.instances(), len + len);
1150 EXPECT_EQ(tracker.copies() + tracker.moves(), len);
1151 }
1152 }
1153 }
1154 }
1155
1156 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) {
1157 using Instance = TypeParam;
1158 using InstanceVec = absl::InlinedVector<Instance, 8>;
1159 InstanceTracker tracker;
1160 for (int len = 0; len < 20; len++) {
1161 SCOPED_TRACE(len);
1162 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1163 SCOPED_TRACE(longorshort);
1164 tracker.ResetCopiesMovesSwaps();
1165
1166 InstanceVec longer, shorter;
1167 const size_t inlined_capacity = longer.capacity();
1168 for (int i = 0; i < len; i++) {
1169 longer.push_back(Instance(i));
1170 shorter.push_back(Instance(i));
1171 }
1172 longer.push_back(Instance(len));
1173 EXPECT_EQ(tracker.instances(), len + len + 1);
1174 EXPECT_GE(tracker.copies() + tracker.moves(),
1175 len + len + 1); // More due to reallocation.
1176
1177 tracker.ResetCopiesMovesSwaps();
1178 int src_len;
1179 if (longorshort) {
1180 src_len = len + 1;
1181 shorter = std::move(longer);
1182 } else {
1183 src_len = len;
1184 longer = std::move(shorter);
1185 }
1186 if (static_cast<size_t>(src_len) > inlined_capacity) {
1187 // Allocation moved as a whole.
1188 EXPECT_EQ(tracker.instances(), src_len);
1189 EXPECT_EQ(tracker.live_instances(), src_len);
1190 EXPECT_EQ(tracker.copies(), 0);
1191 EXPECT_EQ(tracker.moves(), 0);
1192 } else {
1193 // Elements are all copied.
1194 EXPECT_EQ(tracker.instances(), src_len + src_len);
1195 if (Instance::supports_move()) {
1196 EXPECT_EQ(tracker.copies(), 0);
1197 EXPECT_EQ(tracker.moves(), src_len);
1198 EXPECT_EQ(tracker.live_instances(), src_len);
1199 } else {
1200 EXPECT_EQ(tracker.copies(), src_len);
1201 EXPECT_EQ(tracker.moves(), 0);
1202 EXPECT_EQ(tracker.live_instances(), src_len + src_len);
1203 }
1204 }
1205 EXPECT_EQ(tracker.swaps(), 0);
1206 }
1207 }
1208 }
1209
1210 TEST(CountElemAssign, SimpleTypeWithInlineBacking) {
1211 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1212 SCOPED_TRACE(original_size);
1213 // Original contents are [12345, 12345, ...]
1214 std::vector<int> original_contents(original_size, 12345);
1215
1216 absl::InlinedVector<int, 2> v(original_contents.begin(),
1217 original_contents.end());
1218 v.assign(2, 123);
1219 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(123, 123)));
1220 if (original_size <= 2) {
1221 // If the original had inline backing, it should stay inline.
1222 EXPECT_EQ(2u, v.capacity());
1223 }
1224 }
1225 }
1226
1227 TEST(CountElemAssign, SimpleTypeWithAllocation) {
1228 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1229 SCOPED_TRACE(original_size);
1230 // Original contents are [12345, 12345, ...]
1231 std::vector<int> original_contents(original_size, 12345);
1232
1233 absl::InlinedVector<int, 2> v(original_contents.begin(),
1234 original_contents.end());
1235 v.assign(3, 123);
1236 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(123, 123, 123)));
1237 EXPECT_LE(v.size(), v.capacity());
1238 }
1239 }
1240
1241 TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) {
1242 using Instance = TypeParam;
1243 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1244 SCOPED_TRACE(original_size);
1245 // Original contents are [12345, 12345, ...]
1246 std::vector<Instance> original_contents(original_size, Instance(12345));
1247
1248 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1249 original_contents.end());
1250 v.assign(2, Instance(123));
1251 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(ValueIs(123), ValueIs(123))));
1252 if (original_size <= 2) {
1253 // If the original had inline backing, it should stay inline.
1254 EXPECT_EQ(2u, v.capacity());
1255 }
1256 }
1257 }
1258
1259 template <typename Instance>
1260 void InstanceCountElemAssignWithAllocationTest() {
1261 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1262 SCOPED_TRACE(original_size);
1263 // Original contents are [12345, 12345, ...]
1264 std::vector<Instance> original_contents(original_size, Instance(12345));
1265
1266 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1267 original_contents.end());
1268 v.assign(3, Instance(123));
1269 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(ValueIs(123), ValueIs(123),
1270 ValueIs(123))));
1271 EXPECT_LE(v.size(), v.capacity());
1272 }
1273 }
1274 TEST(CountElemAssign, WithAllocationCopyableInstance) {
1275 InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>();
1276 }
1277 TEST(CountElemAssign, WithAllocationCopyableMovableInstance) {
1278 InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>();
1279 }
1280
1281 TEST(RangedConstructor, SimpleType) {
1282 std::vector<int> source_v = {4, 5, 6};
1283 // First try to fit in inline backing
1284 absl::InlinedVector<int, 4> v(source_v.begin(), source_v.end());
1285 EXPECT_EQ(3u, v.size());
1286 EXPECT_EQ(4u,
1287 v.capacity()); // Indication that we're still on inlined storage
1288 EXPECT_EQ(4, v[0]);
1289 EXPECT_EQ(5, v[1]);
1290 EXPECT_EQ(6, v[2]);
1291
1292 // Now, force a re-allocate
1293 absl::InlinedVector<int, 2> realloc_v(source_v.begin(), source_v.end());
1294 EXPECT_EQ(3u, realloc_v.size());
1295 EXPECT_LT(2u, realloc_v.capacity());
1296 EXPECT_EQ(4, realloc_v[0]);
1297 EXPECT_EQ(5, realloc_v[1]);
1298 EXPECT_EQ(6, realloc_v[2]);
1299 }
1300
1301 // Test for ranged constructors using Instance as the element type and
1302 // SourceContainer as the source container type.
1303 template <typename Instance, typename SourceContainer, int inlined_capacity>
1304 void InstanceRangedConstructorTestForContainer() {
1305 InstanceTracker tracker;
1306 SourceContainer source_v = {Instance(0), Instance(1)};
1307 tracker.ResetCopiesMovesSwaps();
1308 absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(),
1309 source_v.end());
1310 EXPECT_EQ(2u, v.size());
1311 EXPECT_LT(1u, v.capacity());
1312 EXPECT_EQ(0, v[0].value());
1313 EXPECT_EQ(1, v[1].value());
1314 EXPECT_EQ(tracker.copies(), 2);
1315 EXPECT_EQ(tracker.moves(), 0);
1316 }
1317
1318 template <typename Instance, int inlined_capacity>
1319 void InstanceRangedConstructorTestWithCapacity() {
1320 // Test with const and non-const, random access and non-random-access sources.
1321 // TODO(bsamwel): Test with an input iterator source.
1322 {
1323 SCOPED_TRACE("std::list");
1324 InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>,
1325 inlined_capacity>();
1326 {
1327 SCOPED_TRACE("const std::list");
1328 InstanceRangedConstructorTestForContainer<
1329 Instance, const std::list<Instance>, inlined_capacity>();
1330 }
1331 {
1332 SCOPED_TRACE("std::vector");
1333 InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>,
1334 inlined_capacity>();
1335 }
1336 {
1337 SCOPED_TRACE("const std::vector");
1338 InstanceRangedConstructorTestForContainer<
1339 Instance, const std::vector<Instance>, inlined_capacity>();
1340 }
1341 }
1342 }
1343
1344 TYPED_TEST_P(InstanceTest, RangedConstructor) {
1345 using Instance = TypeParam;
1346 SCOPED_TRACE("capacity=1");
1347 InstanceRangedConstructorTestWithCapacity<Instance, 1>();
1348 SCOPED_TRACE("capacity=2");
1349 InstanceRangedConstructorTestWithCapacity<Instance, 2>();
1350 }
1351
1352 TEST(RangedConstructor, ElementsAreConstructed) {
1353 std::vector<std::string> source_v = {"cat", "dog"};
1354
1355 // Force expansion and re-allocation of v. Ensures that when the vector is
1356 // expanded that new elements are constructed.
1357 absl::InlinedVector<std::string, 1> v(source_v.begin(), source_v.end());
1358 EXPECT_EQ("cat", v[0]);
1359 EXPECT_EQ("dog", v[1]);
1360 }
1361
1362 TEST(RangedAssign, SimpleType) {
1363 // Test for all combinations of original sizes (empty and non-empty inline,
1364 // and out of line) and target sizes.
1365 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1366 SCOPED_TRACE(original_size);
1367 // Original contents are [12345, 12345, ...]
1368 std::vector<int> original_contents(original_size, 12345);
1369
1370 for (int target_size = 0; target_size <= 5; ++target_size) {
1371 SCOPED_TRACE(target_size);
1372
1373 // New contents are [3, 4, ...]
1374 std::vector<int> new_contents;
1375 for (int i = 0; i < target_size; ++i) {
1376 new_contents.push_back(i + 3);
1377 }
1378
1379 absl::InlinedVector<int, 3> v(original_contents.begin(),
1380 original_contents.end());
1381 v.assign(new_contents.begin(), new_contents.end());
1382
1383 EXPECT_EQ(new_contents.size(), v.size());
1384 EXPECT_LE(new_contents.size(), v.capacity());
1385 if (target_size <= 3 && original_size <= 3) {
1386 // Storage should stay inline when target size is small.
1387 EXPECT_EQ(3u, v.capacity());
1388 }
1389 EXPECT_THAT(v, ElementsAreArray(new_contents));
1390 }
1391 }
1392 }
1393
1394 // Returns true if lhs and rhs have the same value.
1395 template <typename Instance>
1396 static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) {
1397 return lhs.value() == rhs.value();
1398 }
1399
1400 // Test for ranged assign() using Instance as the element type and
1401 // SourceContainer as the source container type.
1402 template <typename Instance, typename SourceContainer>
1403 void InstanceRangedAssignTestForContainer() {
1404 // Test for all combinations of original sizes (empty and non-empty inline,
1405 // and out of line) and target sizes.
1406 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1407 SCOPED_TRACE(original_size);
1408 // Original contents are [12345, 12345, ...]
1409 std::vector<Instance> original_contents(original_size, Instance(12345));
1410
1411 for (size_t target_size = 0; target_size <= 5; ++target_size) {
1412 SCOPED_TRACE(target_size);
1413
1414 // New contents are [3, 4, ...]
1415 // Generate data using a non-const container, because SourceContainer
1416 // itself may be const.
1417 // TODO(bsamwel): Test with an input iterator.
1418 std::vector<Instance> new_contents_in;
1419 for (size_t i = 0; i < target_size; ++i) {
1420 new_contents_in.push_back(Instance(static_cast<int>(i) + 3));
1421 }
1422 SourceContainer new_contents(new_contents_in.begin(),
1423 new_contents_in.end());
1424
1425 absl::InlinedVector<Instance, 3> v(original_contents.begin(),
1426 original_contents.end());
1427 v.assign(new_contents.begin(), new_contents.end());
1428
1429 EXPECT_EQ(new_contents.size(), v.size());
1430 EXPECT_LE(new_contents.size(), v.capacity());
1431 if (target_size <= 3 && original_size <= 3) {
1432 // Storage should stay inline when target size is small.
1433 EXPECT_EQ(3u, v.capacity());
1434 }
1435 EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(),
1436 InstanceValuesEqual<Instance>));
1437 }
1438 }
1439 }
1440
1441 TYPED_TEST_P(InstanceTest, RangedAssign) {
1442 using Instance = TypeParam;
1443 // Test with const and non-const, random access and non-random-access sources.
1444 // TODO(bsamwel): Test with an input iterator source.
1445 SCOPED_TRACE("std::list");
1446 InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>();
1447 SCOPED_TRACE("const std::list");
1448 InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>();
1449 SCOPED_TRACE("std::vector");
1450 InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>();
1451 SCOPED_TRACE("const std::vector");
1452 InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>();
1453 }
1454
1455 TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) {
1456 EXPECT_THAT((absl::InlinedVector<int, 4>{4, 5, 6}),
1457 AllOf(SizeIs(3u), CapacityIs(4u), ElementsAre(4, 5, 6)));
1458 }
1459
1460 TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) {
1461 EXPECT_THAT((absl::InlinedVector<int, 2>{4, 5, 6}),
1462 AllOf(SizeIs(3u), CapacityIs(Gt(2u)), ElementsAre(4, 5, 6)));
1463 }
1464
1465 TEST(InitializerListConstructor, DisparateTypesInList) {
1466 EXPECT_THAT((absl::InlinedVector<int, 2>{-7, 8ULL}), ElementsAre(-7, 8));
1467
1468 EXPECT_THAT((absl::InlinedVector<std::string, 2>{"foo", std::string("bar")}),
1469 ElementsAre("foo", "bar"));
1470 }
1471
1472 TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) {
1473 EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{
1474 CopyableMovableInstance(0)}),
1475 AllOf(SizeIs(1u), CapacityIs(1u), ElementsAre(ValueIs(0))));
1476 }
1477
1478 TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) {
1479 EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{
1480 CopyableMovableInstance(0), CopyableMovableInstance(1)}),
1481 AllOf(SizeIs(2u), CapacityIs(Gt(1u)),
1482 ElementsAre(ValueIs(0), ValueIs(1))));
1483 }
1484
1485 TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) {
1486 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1487 SCOPED_TRACE(original_size);
1488
1489 absl::InlinedVector<int, 2> v1(original_size, 12345);
1490 const size_t original_capacity_v1 = v1.capacity();
1491 v1.assign({3});
1492 EXPECT_THAT(v1, AllOf(SizeIs(1u), CapacityIs(original_capacity_v1),
1493 ElementsAre(3)));
1494
1495 absl::InlinedVector<int, 2> v2(original_size, 12345);
1496 const size_t original_capacity_v2 = v2.capacity();
1497 v2 = {3};
1498 EXPECT_THAT(v2, AllOf(SizeIs(1u), CapacityIs(original_capacity_v2),
1499 ElementsAre(3)));
1500 }
1501 }
1502
1503 TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) {
1504 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1505 SCOPED_TRACE(original_size);
1506 absl::InlinedVector<int, 2> v1(original_size, 12345);
1507 v1.assign({3, 4, 5});
1508 EXPECT_THAT(v1, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1509 EXPECT_LE(3u, v1.capacity());
1510
1511 absl::InlinedVector<int, 2> v2(original_size, 12345);
1512 v2 = {3, 4, 5};
1513 EXPECT_THAT(v2, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1514 EXPECT_LE(3u, v2.capacity());
1515 }
1516 }
1517
1518 TEST(InitializerListAssign, DisparateTypesInList) {
1519 absl::InlinedVector<int, 2> v_int1;
1520 v_int1.assign({-7, 8ULL});
1521 EXPECT_THAT(v_int1, ElementsAre(-7, 8));
1522
1523 absl::InlinedVector<int, 2> v_int2;
1524 v_int2 = {-7, 8ULL};
1525 EXPECT_THAT(v_int2, ElementsAre(-7, 8));
1526
1527 absl::InlinedVector<std::string, 2> v_string1;
1528 v_string1.assign({"foo", std::string("bar")});
1529 EXPECT_THAT(v_string1, ElementsAre("foo", "bar"));
1530
1531 absl::InlinedVector<std::string, 2> v_string2;
1532 v_string2 = {"foo", std::string("bar")};
1533 EXPECT_THAT(v_string2, ElementsAre("foo", "bar"));
1534 }
1535
1536 TYPED_TEST_P(InstanceTest, InitializerListAssign) {
1537 using Instance = TypeParam;
1538 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1539 SCOPED_TRACE(original_size);
1540 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1541 const size_t original_capacity = v.capacity();
1542 v.assign({Instance(3)});
1543 EXPECT_THAT(v, AllOf(SizeIs(1u), CapacityIs(original_capacity),
1544 ElementsAre(ValueIs(3))));
1545 }
1546 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1547 SCOPED_TRACE(original_size);
1548 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1549 v.assign({Instance(3), Instance(4), Instance(5)});
1550 EXPECT_THAT(
1551 v, AllOf(SizeIs(3u), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
1552 EXPECT_LE(3u, v.capacity());
1553 }
1554 }
1555
1556 REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
1557 CountConstructorsDestructorsOnCopyConstruction,
1558 CountConstructorsDestructorsOnMoveConstruction,
1559 CountConstructorsDestructorsOnAssignment,
1560 CountConstructorsDestructorsOnMoveAssignment,
1561 CountElemAssignInlineBacking, RangedConstructor,
1562 RangedAssign, InitializerListAssign);
1563
1564 using InstanceTypes =
1565 ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
1566 INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
1567 InstanceTypes);
1568
1569 TEST(DynamicVec, DynamicVecCompiles) {
1570 DynamicVec v;
1571 (void)v;
1572 }
1573
1574 TEST(AllocatorSupportTest, Constructors) {
1575 using MyAlloc = CountingAllocator<int>;
1576 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1577 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1578 int64_t allocated = 0;
1579 MyAlloc alloc(&allocated);
1580 { AllocVec ABSL_ATTRIBUTE_UNUSED v; }
1581 { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); }
1582 { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); }
1583 { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); }
1584
1585 AllocVec v2;
1586 { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); }
1587 { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); }
1588 }
1589
1590 TEST(AllocatorSupportTest, CountAllocations) {
1591 using MyAlloc = CountingAllocator<int>;
1592 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1593 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1594 int64_t allocated = 0;
1595 MyAlloc alloc(&allocated);
1596 {
1597 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc);
1598 EXPECT_THAT(allocated, Eq(0));
1599 }
1600 EXPECT_THAT(allocated, Eq(0));
1601 {
1602 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc);
1603 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
1604 }
1605 EXPECT_THAT(allocated, Eq(0));
1606 {
1607 AllocVec v(4, 1, alloc);
1608 EXPECT_THAT(allocated, Eq(0));
1609
1610 int64_t allocated2 = 0;
1611 MyAlloc alloc2(&allocated2);
1612 AllocVec v2(v, alloc2);
1613 EXPECT_THAT(allocated2, Eq(0));
1614
1615 int64_t allocated3 = 0;
1616 MyAlloc alloc3(&allocated3);
1617 AllocVec v3(std::move(v), alloc3);
1618 EXPECT_THAT(allocated3, Eq(0));
1619 }
1620 EXPECT_THAT(allocated, 0);
1621 {
1622 AllocVec v(8, 2, alloc);
1623 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
1624
1625 int64_t allocated2 = 0;
1626 MyAlloc alloc2(&allocated2);
1627 AllocVec v2(v, alloc2);
1628 EXPECT_THAT(allocated2, Eq(static_cast<int64_t>(v2.size() * sizeof(int))));
1629
1630 int64_t allocated3 = 0;
1631 MyAlloc alloc3(&allocated3);
1632 AllocVec v3(std::move(v), alloc3);
1633 EXPECT_THAT(allocated3, Eq(static_cast<int64_t>(v3.size() * sizeof(int))));
1634 }
1635 EXPECT_EQ(allocated, 0);
1636 {
1637 // Test shrink_to_fit deallocations.
1638 AllocVec v(8, 2, alloc);
1639 EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
1640 v.resize(5);
1641 EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
1642 v.shrink_to_fit();
1643 EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
1644 v.resize(4);
1645 EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
1646 v.shrink_to_fit();
1647 EXPECT_EQ(allocated, 0);
1648 }
1649 }
1650
1651 TEST(AllocatorSupportTest, SwapBothAllocated) {
1652 using MyAlloc = CountingAllocator<int>;
1653 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1654 int64_t allocated1 = 0;
1655 int64_t allocated2 = 0;
1656 {
1657 const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
1658 const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1659 MyAlloc a1(&allocated1);
1660 MyAlloc a2(&allocated2);
1661 AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
1662 AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
1663 EXPECT_LT(v1.capacity(), v2.capacity());
1664 EXPECT_THAT(allocated1,
1665 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1666 EXPECT_THAT(allocated2,
1667 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1668 v1.swap(v2);
1669 EXPECT_THAT(v1, ElementsAreArray(ia2));
1670 EXPECT_THAT(v2, ElementsAreArray(ia1));
1671 EXPECT_THAT(allocated1,
1672 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1673 EXPECT_THAT(allocated2,
1674 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1675 }
1676 EXPECT_THAT(allocated1, Eq(0));
1677 EXPECT_THAT(allocated2, Eq(0));
1678 }
1679
1680 TEST(AllocatorSupportTest, SwapOneAllocated) {
1681 using MyAlloc = CountingAllocator<int>;
1682 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1683 int64_t allocated1 = 0;
1684 int64_t allocated2 = 0;
1685 {
1686 const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
1687 const int ia2[] = {0, 1, 2, 3};
1688 MyAlloc a1(&allocated1);
1689 MyAlloc a2(&allocated2);
1690 AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
1691 AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
1692 EXPECT_THAT(allocated1,
1693 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1694 EXPECT_THAT(allocated2, Eq(0));
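// v1 is heap-backed and v2 is inline. After the swap, the heap buffer (and
// the allocator that owns it) should travel with the elements, as checked
// below via get_allocator().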
1695 v1.swap(v2);
1696 EXPECT_THAT(v1, ElementsAreArray(ia2));
1697 EXPECT_THAT(v2, ElementsAreArray(ia1));
1698 EXPECT_THAT(allocated1,
1699 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1700 EXPECT_THAT(allocated2, Eq(0));
1701 EXPECT_TRUE(v2.get_allocator() == a1);
1702 EXPECT_TRUE(v1.get_allocator() == a2);
1703 }
1704 EXPECT_THAT(allocated1, Eq(0));
1705 EXPECT_THAT(allocated2, Eq(0));
1706 }
1707
1708 TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
1709 using StdVector = std::vector<int, CountingAllocator<int>>;
1710 using Alloc = CountingAllocator<StdVector>;
1711 using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
1712 using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;
1713
1714 int64_t total_allocated_byte_count = 0;
1715
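// Note: the unary '+' below presumably forces this to parse as an expression
// rather than a function declaration (the "most vexing parse"); it does not
// change the value of the pointer.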
1716 AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
1717
1718 // Called only once to remain inlined
1719 inlined_case.emplace_back();
1720
1721 int64_t absl_responsible_for_count = total_allocated_byte_count;
1722
1723 // MSVC's allocator preemptively allocates in debug mode
1724 #if !defined(_MSC_VER)
1725 EXPECT_EQ(absl_responsible_for_count, 0);
1726 #endif // !defined(_MSC_VER)
1727
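// The scoped_allocator_adaptor propagates the counting allocator to the
// nested std::vector, so the inner vector's element allocation should also
// show up in total_allocated_byte_count.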
1728 inlined_case[0].emplace_back();
1729 EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
1730
1731 inlined_case.clear();
1732 inlined_case.shrink_to_fit();
1733 EXPECT_EQ(total_allocated_byte_count, 0);
1734 }
1735
1736 TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
1737 using StdVector = std::vector<int, CountingAllocator<int>>;
1738 using Alloc = CountingAllocator<StdVector>;
1739 using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
1740 using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;
1741
1742 int64_t total_allocated_byte_count = 0;
1743
1744 AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
1745
1746 // Called twice to force the vector into allocated (heap) storage
1747 allocated_case.emplace_back();
1748 allocated_case.emplace_back();
1749
1750 int64_t absl_responsible_for_count = total_allocated_byte_count;
1751 EXPECT_GT(absl_responsible_for_count, 0);
1752
1753 allocated_case[1].emplace_back();
1754 EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
1755
1756 allocated_case.clear();
1757 allocated_case.shrink_to_fit();
1758 EXPECT_EQ(total_allocated_byte_count, 0);
1759 }
1760
1761 TEST(AllocatorSupportTest, SizeAllocConstructor) {
1762 constexpr size_t inlined_size = 4;
1763 using Alloc = CountingAllocator<int>;
1764 using AllocVec = absl::InlinedVector<int, inlined_size, Alloc>;
1765
1766 {
1767 auto len = inlined_size / 2;
1768 int64_t allocated = 0;
1769 auto v = AllocVec(len, Alloc(&allocated));
1770
1771 // Inline storage used; allocator should not be invoked
1772 EXPECT_THAT(allocated, Eq(0));
1773 EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
1774 }
1775
1776 {
1777 auto len = inlined_size * 2;
1778 int64_t allocated = 0;
1779 auto v = AllocVec(len, Alloc(&allocated));
1780
1781 // Out of line storage used; allocation of 8 elements expected
1782 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(len * sizeof(int))));
1783 EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
1784 }
1785 }
1786
1787 TEST(InlinedVectorTest, MinimumAllocatorCompilesUsingTraits) {
1788 using T = int;
1789 using A = std::allocator<T>;
1790 using ATraits = absl::allocator_traits<A>;
1791
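// A deliberately minimal allocator: it supplies only value_type, allocate(),
// and deallocate(). InlinedVector is expected to obtain everything else
// (pointer types, construct, destroy, etc.) through std::allocator_traits.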
1792 struct MinimumAllocator {
1793 using value_type = T;
1794
1795 value_type* allocate(size_t n) {
1796 A a;
1797 return ATraits::allocate(a, n);
1798 }
1799
1800 void deallocate(value_type* p, size_t n) {
1801 A a;
1802 ATraits::deallocate(a, p, n);
1803 }
1804 };
1805
1806 absl::InlinedVector<T, 1, MinimumAllocator> vec;
1807 vec.emplace_back();
1808 vec.resize(0);
1809 }
1810
1811 TEST(InlinedVectorTest, AbslHashValueWorks) {
1812 using V = absl::InlinedVector<int, 4>;
1813 std::vector<V> cases;
1814
1815 // Generate a variety of vectors; some of these are small enough for the
1816 // inline space but are nonetheless stored out of line after being shrunk.
1817 for (size_t i = 0; i < 10; ++i) {
1818 V v;
1819 for (int j = 0; j < static_cast<int>(i); ++j) {
1820 v.push_back(j);
1821 }
1822 cases.push_back(v);
1823 v.resize(i % 4);
1824 cases.push_back(v);
1825 }
1826
1827 EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(cases));
1828 }
1829
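// A counted instance type that is move-constructible but not move-assignable,
// used to check that InlinedVector's move assignment never relies on element
// move assignment.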
1830 class MoveConstructibleOnlyInstance
1831 : public absl::test_internal::BaseCountedInstance {
1832 public:
1833 explicit MoveConstructibleOnlyInstance(int x) : BaseCountedInstance(x) {}
1834 MoveConstructibleOnlyInstance(MoveConstructibleOnlyInstance&& other) =
1835 default;
1836 MoveConstructibleOnlyInstance& operator=(
1837 MoveConstructibleOnlyInstance&& other) = delete;
1838 };
1839
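// Tuple matcher for Pointwise: compares each element's value() against the
// expected integer.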
1840 MATCHER(HasValue, "") {
1841 return ::testing::get<0>(arg).value() == ::testing::get<1>(arg);
1842 }
1843
1844 TEST(NonAssignableMoveAssignmentTest, AllocatedToInline) {
1845 using X = MoveConstructibleOnlyInstance;
1846 InstanceTracker tracker;
1847 absl::InlinedVector<X, 2> inlined;
1848 inlined.emplace_back(1);
1849 absl::InlinedVector<X, 2> allocated;
1850 allocated.emplace_back(1);
1851 allocated.emplace_back(2);
1852 allocated.emplace_back(3);
1853 tracker.ResetCopiesMovesSwaps();
1854
1855 inlined = std::move(allocated);
1856 // Ownership of the allocated storage is transferred, so no element moves occur.
1857 EXPECT_EQ(tracker.moves(), 0);
1858 EXPECT_EQ(tracker.live_instances(), 3);
1859
1860 EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
1861 }
1862
1863 TEST(NonAssignableMoveAssignmentTest, InlineToAllocated) {
1864 using X = MoveConstructibleOnlyInstance;
1865 InstanceTracker tracker;
1866 absl::InlinedVector<X, 2> inlined;
1867 inlined.emplace_back(1);
1868 absl::InlinedVector<X, 2> allocated;
1869 allocated.emplace_back(1);
1870 allocated.emplace_back(2);
1871 allocated.emplace_back(3);
1872 tracker.ResetCopiesMovesSwaps();
1873
1874 allocated = std::move(inlined);
1875 // The single inline element is moved individually.
1876 EXPECT_EQ(tracker.moves(), 1);
1877 EXPECT_EQ(tracker.live_instances(), 1);
1878
1879 EXPECT_THAT(allocated, Pointwise(HasValue(), {1}));
1880 }
1881
1882 TEST(NonAssignableMoveAssignmentTest, InlineToInline) {
1883 using X = MoveConstructibleOnlyInstance;
1884 InstanceTracker tracker;
1885 absl::InlinedVector<X, 2> inlined_a;
1886 inlined_a.emplace_back(1);
1887 absl::InlinedVector<X, 2> inlined_b;
1888 inlined_b.emplace_back(1);
1889 tracker.ResetCopiesMovesSwaps();
1890
1891 inlined_a = std::move(inlined_b);
1892 // The single element is moved individually.
1893 EXPECT_EQ(tracker.moves(), 1);
1894 EXPECT_EQ(tracker.live_instances(), 1);
1895
1896 EXPECT_THAT(inlined_a, Pointwise(HasValue(), {1}));
1897 }
1898
1899 TEST(NonAssignableMoveAssignmentTest, AllocatedToAllocated) {
1900 using X = MoveConstructibleOnlyInstance;
1901 InstanceTracker tracker;
1902 absl::InlinedVector<X, 2> allocated_a;
1903 allocated_a.emplace_back(1);
1904 allocated_a.emplace_back(2);
1905 allocated_a.emplace_back(3);
1906 absl::InlinedVector<X, 2> allocated_b;
1907 allocated_b.emplace_back(4);
1908 allocated_b.emplace_back(5);
1909 allocated_b.emplace_back(6);
1910 allocated_b.emplace_back(7);
1911 tracker.ResetCopiesMovesSwaps();
1912
1913 allocated_a = std::move(allocated_b);
1914 // Again, ownership of the allocated storage is passed; no element moves.
1915 EXPECT_EQ(tracker.moves(), 0);
1916 EXPECT_EQ(tracker.live_instances(), 4);
1917
1918 EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
1919 }
1920
1921 TEST(NonAssignableMoveAssignmentTest, AssignThis) {
1922 using X = MoveConstructibleOnlyInstance;
1923 InstanceTracker tracker;
1924 absl::InlinedVector<X, 2> v;
1925 v.emplace_back(1);
1926 v.emplace_back(2);
1927 v.emplace_back(3);
1928
1929 tracker.ResetCopiesMovesSwaps();
1930
1931 // Obfuscated in order to pass -Wself-move.
1932 v = std::move(*std::addressof(v));
1933 // Self-move-assignment is a no-op.
1934 EXPECT_EQ(tracker.moves(), 0);
1935 EXPECT_EQ(tracker.live_instances(), 3);
1936
1937 EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
1938 }
1939
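// A counted instance type whose namespace-scope swap is deleted, so
// InlinedVector::swap must fall back to element moves or whole-storage
// transfer rather than ADL swap.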
1940 class NonSwappableInstance : public absl::test_internal::BaseCountedInstance {
1941 public:
1942 explicit NonSwappableInstance(int x) : BaseCountedInstance(x) {}
1943 NonSwappableInstance(const NonSwappableInstance& other) = default;
1944 NonSwappableInstance& operator=(const NonSwappableInstance& other) = default;
1945 NonSwappableInstance(NonSwappableInstance&& other) = default;
1946 NonSwappableInstance& operator=(NonSwappableInstance&& other) = default;
1947 };
1948
1949 void swap(NonSwappableInstance&, NonSwappableInstance&) = delete;
1950
1951 TEST(NonSwappableSwapTest, InlineAndAllocatedTransferStorageAndMove) {
1952 using X = NonSwappableInstance;
1953 InstanceTracker tracker;
1954 absl::InlinedVector<X, 2> inlined;
1955 inlined.emplace_back(1);
1956 absl::InlinedVector<X, 2> allocated;
1957 allocated.emplace_back(1);
1958 allocated.emplace_back(2);
1959 allocated.emplace_back(3);
1960 tracker.ResetCopiesMovesSwaps();
1961
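// Expected: the heap buffer is handed to `inlined` wholesale, while the
// single inline element is moved (one move) into `allocated`.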
1962 inlined.swap(allocated);
1963 EXPECT_EQ(tracker.moves(), 1);
1964 EXPECT_EQ(tracker.live_instances(), 4);
1965
1966 EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
1967 }
1968
1969 TEST(NonSwappableSwapTest, InlineAndInlineMoveIndividualElements) {
1970 using X = NonSwappableInstance;
1971 InstanceTracker tracker;
1972 absl::InlinedVector<X, 2> inlined_a;
1973 inlined_a.emplace_back(1);
1974 absl::InlinedVector<X, 2> inlined_b;
1975 inlined_b.emplace_back(2);
1976 tracker.ResetCopiesMovesSwaps();
1977
1978 inlined_a.swap(inlined_b);
1979 EXPECT_EQ(tracker.moves(), 3);
1980 EXPECT_EQ(tracker.live_instances(), 2);
1981
1982 EXPECT_THAT(inlined_a, Pointwise(HasValue(), {2}));
1983 EXPECT_THAT(inlined_b, Pointwise(HasValue(), {1}));
1984 }
1985
1986 TEST(NonSwappableSwapTest, AllocatedAndAllocatedOnlyTransferStorage) {
1987 using X = NonSwappableInstance;
1988 InstanceTracker tracker;
1989 absl::InlinedVector<X, 2> allocated_a;
1990 allocated_a.emplace_back(1);
1991 allocated_a.emplace_back(2);
1992 allocated_a.emplace_back(3);
1993 absl::InlinedVector<X, 2> allocated_b;
1994 allocated_b.emplace_back(4);
1995 allocated_b.emplace_back(5);
1996 allocated_b.emplace_back(6);
1997 allocated_b.emplace_back(7);
1998 tracker.ResetCopiesMovesSwaps();
1999
2000 allocated_a.swap(allocated_b);
2001 EXPECT_EQ(tracker.moves(), 0);
2002 EXPECT_EQ(tracker.live_instances(), 7);
2003
2004 EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
2005 EXPECT_THAT(allocated_b, Pointwise(HasValue(), {1, 2, 3}));
2006 }
2007
2008 TEST(NonSwappableSwapTest, SwapThis) {
2009 using X = NonSwappableInstance;
2010 InstanceTracker tracker;
2011 absl::InlinedVector<X, 2> v;
2012 v.emplace_back(1);
2013 v.emplace_back(2);
2014 v.emplace_back(3);
2015
2016 tracker.ResetCopiesMovesSwaps();
2017
2018 v.swap(v);
2019 EXPECT_EQ(tracker.moves(), 0);
2020 EXPECT_EQ(tracker.live_instances(), 3);
2021
2022 EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
2023 }
2024
2025 } // anonymous namespace
2026