1 // Copyright 2019 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "absl/container/inlined_vector.h"
16
17 #include <algorithm>
18 #include <cstddef>
19 #include <forward_list>
20 #include <iterator>
21 #include <list>
22 #include <memory>
23 #include <scoped_allocator>
24 #include <sstream>
25 #include <stdexcept>
26 #include <string>
27 #include <utility>
28 #include <vector>
29
30 #include "gmock/gmock.h"
31 #include "gtest/gtest.h"
32 #include "absl/base/attributes.h"
33 #include "absl/base/internal/exception_testing.h"
34 #include "absl/base/macros.h"
35 #include "absl/base/options.h"
36 #include "absl/container/internal/counting_allocator.h"
37 #include "absl/container/internal/test_instance_tracker.h"
38 #include "absl/hash/hash_testing.h"
39 #include "absl/log/check.h"
40 #include "absl/memory/memory.h"
41 #include "absl/strings/str_cat.h"
42
43 namespace {
44
45 using absl::container_internal::CountingAllocator;
46 using absl::test_internal::CopyableMovableInstance;
47 using absl::test_internal::CopyableOnlyInstance;
48 using absl::test_internal::InstanceTracker;
49 using testing::AllOf;
50 using testing::Each;
51 using testing::ElementsAre;
52 using testing::ElementsAreArray;
53 using testing::Eq;
54 using testing::Gt;
55 using testing::Pointee;
56 using testing::Pointwise;
57 using testing::PrintToString;
58 using testing::SizeIs;
59
60 using IntVec = absl::InlinedVector<int, 8>;
61
62 MATCHER_P(CapacityIs, n, "") {
63 return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
64 }
65
66 MATCHER_P(ValueIs, e, "") {
67 return testing::ExplainMatchResult(e, arg.value(), result_listener);
68 }
69
70 // TODO(bsamwel): Add support for movable-only types.
71
72 // Test fixture for typed tests on BaseCountedInstance derived classes, see
73 // test_instance_tracker.h.
74 template <typename T>
75 class InstanceTest : public ::testing::Test {};
76 TYPED_TEST_SUITE_P(InstanceTest);
77
78 // A simple reference counted class to make sure that the proper elements are
79 // destroyed in the erase(begin, end) test.
80 class RefCounted {
81 public:
82 RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }
83
84 RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
85 Ref();
86 }
87
88 ~RefCounted() {
89 Unref();
90 count_ = nullptr;
91 }
92
93 friend void swap(RefCounted& a, RefCounted& b) {
94 using std::swap;
95 swap(a.value_, b.value_);
96 swap(a.count_, b.count_);
97 }
98
99 RefCounted& operator=(RefCounted v) {
100 using std::swap;
101 swap(*this, v);
102 return *this;
103 }
104
105 void Ref() const {
106 CHECK_NE(count_, nullptr);
107 ++(*count_);
108 }
109
110 void Unref() const {
111 --(*count_);
112 CHECK_GE(*count_, 0);
113 }
114
115 int value_;
116 int* count_;
117 };
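
// Illustrative bookkeeping: every RefCounted object, including copies, adds
// one to its shared *count_ on construction and subtracts one on destruction,
// so a slot's count drops back to zero only once all instances tracking it
// have been destroyed.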
118
119 using RefCountedVec = absl::InlinedVector<RefCounted, 8>;
120
121 // A class with a vtable pointer
122 class Dynamic {
123 public:
124 virtual ~Dynamic() {}
125 };
126
127 using DynamicVec = absl::InlinedVector<Dynamic, 8>;
128
129 // Append 0..len-1 to *v
130 template <typename Container>
131 static void Fill(Container* v, size_t len, int offset = 0) {
132 for (size_t i = 0; i < len; i++) {
133 v->push_back(static_cast<int>(i) + offset);
134 }
135 }
136
137 static IntVec Fill(size_t len, int offset = 0) {
138 IntVec v;
139 Fill(&v, len, offset);
140 return v;
141 }
142
143 TEST(IntVec, SimpleOps) {
144 for (size_t len = 0; len < 20; len++) {
145 IntVec v;
146 const IntVec& cv = v; // const alias
147
148 Fill(&v, len);
149 EXPECT_EQ(len, v.size());
150 EXPECT_LE(len, v.capacity());
151
152 for (size_t i = 0; i < len; i++) {
153 EXPECT_EQ(static_cast<int>(i), v[i]);
154 EXPECT_EQ(static_cast<int>(i), v.at(i));
155 }
156 EXPECT_EQ(v.begin(), v.data());
157 EXPECT_EQ(cv.begin(), cv.data());
158
159 size_t counter = 0;
160 for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) {
161 EXPECT_EQ(static_cast<int>(counter), *iter);
162 counter++;
163 }
164 EXPECT_EQ(counter, len);
165
166 counter = 0;
167 for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) {
168 EXPECT_EQ(static_cast<int>(counter), *iter);
169 counter++;
170 }
171 EXPECT_EQ(counter, len);
172
173 counter = 0;
174 for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) {
175 EXPECT_EQ(static_cast<int>(counter), *iter);
176 counter++;
177 }
178 EXPECT_EQ(counter, len);
179
180 if (len > 0) {
181 EXPECT_EQ(0, v.front());
182 EXPECT_EQ(static_cast<int>(len - 1), v.back());
183 v.pop_back();
184 EXPECT_EQ(len - 1, v.size());
185 for (size_t i = 0; i < v.size(); ++i) {
186 EXPECT_EQ(static_cast<int>(i), v[i]);
187 EXPECT_EQ(static_cast<int>(i), v.at(i));
188 }
189 }
190 }
191 }
192
193 TEST(IntVec, PopBackNoOverflow) {
194 IntVec v = {1};
195 v.pop_back();
196 EXPECT_EQ(v.size(), 0u);
197 }
198
199 TEST(IntVec, AtThrows) {
200 IntVec v = {1, 2, 3};
201 EXPECT_EQ(v.at(2), 3);
202 ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
203 "failed bounds check");
204 }
205
206 TEST(IntVec, ReverseIterator) {
207 for (size_t len = 0; len < 20; len++) {
208 IntVec v;
209 Fill(&v, len);
210
211 size_t counter = len;
212 for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend(); ++iter) {
213 counter--;
214 EXPECT_EQ(static_cast<int>(counter), *iter);
215 }
216 EXPECT_EQ(counter, 0u);
217
218 counter = len;
219 for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
220 ++iter) {
221 counter--;
222 EXPECT_EQ(static_cast<int>(counter), *iter);
223 }
224 EXPECT_EQ(counter, 0u);
225
226 counter = len;
227 for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
228 ++iter) {
229 counter--;
230 EXPECT_EQ(static_cast<int>(counter), *iter);
231 }
232 EXPECT_EQ(counter, 0u);
233 }
234 }
235
236 TEST(IntVec, Erase) {
237 for (size_t len = 1; len < 20; len++) {
238 for (size_t i = 0; i < len; ++i) {
239 IntVec v;
240 Fill(&v, len);
241 v.erase(v.begin() + i);
242 EXPECT_EQ(len - 1, v.size());
243 for (size_t j = 0; j < i; ++j) {
244 EXPECT_EQ(static_cast<int>(j), v[j]);
245 }
246 for (size_t j = i; j < len - 1; ++j) {
247 EXPECT_EQ(static_cast<int>(j + 1), v[j]);
248 }
249 }
250 }
251 }
252
253 TEST(IntVec, Hardened) {
254 IntVec v;
255 Fill(&v, 10);
256 EXPECT_EQ(v[9], 9);
257 #if !defined(NDEBUG) || ABSL_OPTION_HARDENED
258 EXPECT_DEATH_IF_SUPPORTED(v[10], "");
259 EXPECT_DEATH_IF_SUPPORTED(v[static_cast<size_t>(-1)], "");
260 EXPECT_DEATH_IF_SUPPORTED(v.resize(v.max_size() + 1), "");
261 #endif
262 }
263
264 // Move construction of a container of unique pointers should work fine, with no
265 // leaks, despite the fact that unique pointers are trivially relocatable but
266 // not trivially destructible.
267 TEST(UniquePtr, MoveConstruct) {
268 for (size_t size = 0; size < 16; ++size) {
269 SCOPED_TRACE(size);
270
271 absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
272 for (size_t i = 0; i < size; ++i) {
273 a.push_back(std::make_unique<size_t>(i));
274 }
275
276 absl::InlinedVector<std::unique_ptr<size_t>, 2> b(std::move(a));
277
278 ASSERT_THAT(b, SizeIs(size));
279 for (size_t i = 0; i < size; ++i) {
280 ASSERT_THAT(b[i], Pointee(i));
281 }
282 }
283 }
284
285 // Move assignment of a container of unique pointers should work fine, with no
286 // leaks, despite the fact that unique pointers are trivially relocatable but
287 // not trivially destructible.
288 TEST(UniquePtr, MoveAssign) {
289 for (size_t size = 0; size < 16; ++size) {
290 SCOPED_TRACE(size);
291
292 absl::InlinedVector<std::unique_ptr<size_t>, 2> a;
293 for (size_t i = 0; i < size; ++i) {
294 a.push_back(std::make_unique<size_t>(i));
295 }
296
297 absl::InlinedVector<std::unique_ptr<size_t>, 2> b;
298 b = std::move(a);
299
300 ASSERT_THAT(b, SizeIs(size));
301 for (size_t i = 0; i < size; ++i) {
302 ASSERT_THAT(b[i], Pointee(i));
303 }
304 }
305 }
306
307 // At the end of this test loop, the elements between [erase_begin, erase_end)
308 // should have reference counts == 0, and all other elements should have
309 // reference counts == 1.
310 TEST(RefCountedVec, EraseBeginEnd) {
311 for (size_t len = 1; len < 20; ++len) {
312 for (size_t erase_begin = 0; erase_begin < len; ++erase_begin) {
313 for (size_t erase_end = erase_begin; erase_end <= len; ++erase_end) {
314 std::vector<int> counts(len, 0);
315 RefCountedVec v;
316 for (size_t i = 0; i < len; ++i) {
317 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
318 }
319
320 size_t erase_len = erase_end - erase_begin;
321
322 v.erase(v.begin() + erase_begin, v.begin() + erase_end);
323
324 EXPECT_EQ(len - erase_len, v.size());
325
326 // Check the elements before the first element erased.
327 for (size_t i = 0; i < erase_begin; ++i) {
328 EXPECT_EQ(static_cast<int>(i), v[i].value_);
329 }
330
331 // Check the elements after the first element erased.
332 for (size_t i = erase_begin; i < v.size(); ++i) {
333 EXPECT_EQ(static_cast<int>(i + erase_len), v[i].value_);
334 }
335
336 // Check that the elements at the beginning are preserved.
337 for (size_t i = 0; i < erase_begin; ++i) {
338 EXPECT_EQ(1, counts[i]);
339 }
340
341 // Check that the erased elements are destroyed
342 for (size_t i = erase_begin; i < erase_end; ++i) {
343 EXPECT_EQ(0, counts[i]);
344 }
345
346 // Check that the elements at the end are preserved.
347 for (size_t i = erase_end; i < len; ++i) {
348 EXPECT_EQ(1, counts[i]);
349 }
350 }
351 }
352 }
353 }
354
355 struct NoDefaultCtor {
356 explicit NoDefaultCtor(int) {}
357 };
358 struct NoCopy {
359 NoCopy() {}
360 NoCopy(const NoCopy&) = delete;
361 };
362 struct NoAssign {
363 NoAssign() {}
364 NoAssign& operator=(const NoAssign&) = delete;
365 };
366 struct MoveOnly {
367 MoveOnly() {}
368 MoveOnly(MoveOnly&&) = default;
369 MoveOnly& operator=(MoveOnly&&) = default;
370 };
371 TEST(InlinedVectorTest, NoDefaultCtor) {
372 absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2));
373 (void)v;
374 }
375 TEST(InlinedVectorTest, NoCopy) {
376 absl::InlinedVector<NoCopy, 1> v(10);
377 (void)v;
378 }
379 TEST(InlinedVectorTest, NoAssign) {
380 absl::InlinedVector<NoAssign, 1> v(10);
381 (void)v;
382 }
383 TEST(InlinedVectorTest, MoveOnly) {
384 absl::InlinedVector<MoveOnly, 2> v;
385 v.push_back(MoveOnly{});
386 v.push_back(MoveOnly{});
387 v.push_back(MoveOnly{});
388 v.erase(v.begin());
389 v.push_back(MoveOnly{});
390 v.erase(v.begin(), v.begin() + 1);
391 v.insert(v.begin(), MoveOnly{});
392 v.emplace(v.begin());
393 v.emplace(v.begin(), MoveOnly{});
394 }
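// The move constructor should be unconditionally noexcept when element moves
// cannot throw; for throwing element moves it should mirror
// absl::default_allocator_is_nothrow.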
395 TEST(InlinedVectorTest, Noexcept) {
396 EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value);
397 EXPECT_TRUE((std::is_nothrow_move_constructible<
398 absl::InlinedVector<MoveOnly, 2>>::value));
399
400 struct MoveCanThrow {
401 MoveCanThrow(MoveCanThrow&&) {}
402 };
403 EXPECT_EQ(absl::default_allocator_is_nothrow::value,
404 (std::is_nothrow_move_constructible<
405 absl::InlinedVector<MoveCanThrow, 2>>::value));
406 }
407
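// emplace_back() constructs in place and returns a reference to the new
// element. With an inlined capacity of 1, the first element below lands in
// inline storage and the second forces a heap allocation.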
408 TEST(InlinedVectorTest, EmplaceBack) {
409 absl::InlinedVector<std::pair<std::string, int>, 1> v;
410
411 auto& inlined_element = v.emplace_back("answer", 42);
412 EXPECT_EQ(&inlined_element, &v[0]);
413 EXPECT_EQ(inlined_element.first, "answer");
414 EXPECT_EQ(inlined_element.second, 42);
415
416 auto& allocated_element = v.emplace_back("taxicab", 1729);
417 EXPECT_EQ(&allocated_element, &v[1]);
418 EXPECT_EQ(allocated_element.first, "taxicab");
419 EXPECT_EQ(allocated_element.second, 1729);
420 }
421
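// shrink_to_fit() never reduces capacity below the inlined capacity, and it
// releases excess heap capacity down to the current size.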
422 TEST(InlinedVectorTest, ShrinkToFitGrowingVector) {
423 absl::InlinedVector<std::pair<std::string, int>, 1> v;
424
425 v.shrink_to_fit();
426 EXPECT_EQ(v.capacity(), 1u);
427
428 v.emplace_back("answer", 42);
429 v.shrink_to_fit();
430 EXPECT_EQ(v.capacity(), 1u);
431
432 v.emplace_back("taxicab", 1729);
433 EXPECT_GE(v.capacity(), 2u);
434 v.shrink_to_fit();
435 EXPECT_EQ(v.capacity(), 2u);
436
437 v.reserve(100);
438 EXPECT_GE(v.capacity(), 100u);
439 v.shrink_to_fit();
440 EXPECT_EQ(v.capacity(), 2u);
441 }
442
443 TEST(InlinedVectorTest, ShrinkToFitEdgeCases) {
444 {
445 absl::InlinedVector<std::pair<std::string, int>, 1> v;
446 v.emplace_back("answer", 42);
447 v.emplace_back("taxicab", 1729);
448 EXPECT_GE(v.capacity(), 2u);
449 v.pop_back();
450 v.shrink_to_fit();
451 EXPECT_EQ(v.capacity(), 1u);
452 EXPECT_EQ(v[0].first, "answer");
453 EXPECT_EQ(v[0].second, 42);
454 }
455
456 {
457 absl::InlinedVector<std::string, 2> v(100);
458 v.resize(0);
459 v.shrink_to_fit();
460 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
461 }
462
463 {
464 absl::InlinedVector<std::string, 2> v(100);
465 v.resize(1);
466 v.shrink_to_fit();
467 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
468 }
469
470 {
471 absl::InlinedVector<std::string, 2> v(100);
472 v.resize(2);
473 v.shrink_to_fit();
474 EXPECT_EQ(v.capacity(), 2u);
475 }
476
477 {
478 absl::InlinedVector<std::string, 2> v(100);
479 v.resize(3);
480 v.shrink_to_fit();
481 EXPECT_EQ(v.capacity(), 3u);
482 }
483 }
484
485 TEST(IntVec, Insert) {
486 for (size_t len = 0; len < 20; len++) {
487 for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) {
488 {
489 // Single element
490 std::vector<int> std_v;
491 Fill(&std_v, len);
492 IntVec v;
493 Fill(&v, len);
494
495 std_v.insert(std_v.begin() + pos, 9999);
496 IntVec::iterator it = v.insert(v.cbegin() + pos, 9999);
497 EXPECT_THAT(v, ElementsAreArray(std_v));
498 EXPECT_EQ(it, v.cbegin() + pos);
499 }
500 {
501 // n elements
502 std::vector<int> std_v;
503 Fill(&std_v, len);
504 IntVec v;
505 Fill(&v, len);
506
507 IntVec::size_type n = 5;
508 std_v.insert(std_v.begin() + pos, n, 9999);
509 IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999);
510 EXPECT_THAT(v, ElementsAreArray(std_v));
511 EXPECT_EQ(it, v.cbegin() + pos);
512 }
513 {
514 // Iterator range (random access iterator)
515 std::vector<int> std_v;
516 Fill(&std_v, len);
517 IntVec v;
518 Fill(&v, len);
519
520 const std::vector<int> input = {9999, 8888, 7777};
521 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
522 IntVec::iterator it =
523 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
524 EXPECT_THAT(v, ElementsAreArray(std_v));
525 EXPECT_EQ(it, v.cbegin() + pos);
526 }
527 {
528 // Iterator range (forward iterator)
529 std::vector<int> std_v;
530 Fill(&std_v, len);
531 IntVec v;
532 Fill(&v, len);
533
534 const std::forward_list<int> input = {9999, 8888, 7777};
535 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
536 IntVec::iterator it =
537 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
538 EXPECT_THAT(v, ElementsAreArray(std_v));
539 EXPECT_EQ(it, v.cbegin() + pos);
540 }
541 {
542 // Iterator range (input iterator)
543 std::vector<int> std_v;
544 Fill(&std_v, len);
545 IntVec v;
546 Fill(&v, len);
547
548 std_v.insert(std_v.begin() + pos, {9999, 8888, 7777});
549 std::istringstream input("9999 8888 7777");
550 IntVec::iterator it =
551 v.insert(v.cbegin() + pos, std::istream_iterator<int>(input),
552 std::istream_iterator<int>());
553 EXPECT_THAT(v, ElementsAreArray(std_v));
554 EXPECT_EQ(it, v.cbegin() + pos);
555 }
556 {
557 // Initializer list
558 std::vector<int> std_v;
559 Fill(&std_v, len);
560 IntVec v;
561 Fill(&v, len);
562
563 std_v.insert(std_v.begin() + pos, {9999, 8888});
564 IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888});
565 EXPECT_THAT(v, ElementsAreArray(std_v));
566 EXPECT_EQ(it, v.cbegin() + pos);
567 }
568 }
569 }
570 }
571
572 TEST(RefCountedVec, InsertConstructorDestructor) {
573 // Make sure the proper construction/destruction happen during insert
574 // operations.
575 for (size_t len = 0; len < 20; len++) {
576 SCOPED_TRACE(len);
577 for (size_t pos = 0; pos <= len; pos++) {
578 SCOPED_TRACE(pos);
579 std::vector<int> counts(len, 0);
580 int inserted_count = 0;
581 RefCountedVec v;
582 for (size_t i = 0; i < len; ++i) {
583 SCOPED_TRACE(i);
584 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
585 }
586
587 EXPECT_THAT(counts, Each(Eq(1)));
588
589 RefCounted insert_element(9999, &inserted_count);
590 EXPECT_EQ(1, inserted_count);
591 v.insert(v.begin() + pos, insert_element);
592 EXPECT_EQ(2, inserted_count);
593 // Check that the elements at the end are preserved.
594 EXPECT_THAT(counts, Each(Eq(1)));
595 EXPECT_EQ(2, inserted_count);
596 }
597 }
598 }
599
600 TEST(IntVec, Resize) {
601 for (size_t len = 0; len < 20; len++) {
602 IntVec v;
603 Fill(&v, len);
604
605 // Try resizing up and down by k elements
606 static const int kResizeElem = 1000000;
607 for (size_t k = 0; k < 10; k++) {
608 // Enlarging resize
609 v.resize(len + k, kResizeElem);
610 EXPECT_EQ(len + k, v.size());
611 EXPECT_LE(len + k, v.capacity());
612 for (size_t i = 0; i < len + k; i++) {
613 if (i < len) {
614 EXPECT_EQ(static_cast<int>(i), v[i]);
615 } else {
616 EXPECT_EQ(kResizeElem, v[i]);
617 }
618 }
619
620 // Shrinking resize
621 v.resize(len, kResizeElem);
622 EXPECT_EQ(len, v.size());
623 EXPECT_LE(len, v.capacity());
624 for (size_t i = 0; i < len; i++) {
625 EXPECT_EQ(static_cast<int>(i), v[i]);
626 }
627 }
628 }
629 }
630
631 TEST(IntVec, InitWithLength) {
632 for (size_t len = 0; len < 20; len++) {
633 IntVec v(len, 7);
634 EXPECT_EQ(len, v.size());
635 EXPECT_LE(len, v.capacity());
636 for (size_t i = 0; i < len; i++) {
637 EXPECT_EQ(7, v[i]);
638 }
639 }
640 }
641
642 TEST(IntVec, CopyConstructorAndAssignment) {
643 for (size_t len = 0; len < 20; len++) {
644 IntVec v;
645 Fill(&v, len);
646 EXPECT_EQ(len, v.size());
647 EXPECT_LE(len, v.capacity());
648
649 IntVec v2(v);
650 EXPECT_TRUE(v == v2) << PrintToString(v) << PrintToString(v2);
651
652 for (size_t start_len = 0; start_len < 20; start_len++) {
653 IntVec v3;
654 Fill(&v3, start_len, 99); // Add dummy elements that should go away
655 v3 = v;
656 EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3);
657 }
658 }
659 }
660
661 TEST(IntVec, AliasingCopyAssignment) {
662 for (size_t len = 0; len < 20; ++len) {
663 IntVec original;
664 Fill(&original, len);
665 IntVec dup = original;
666 dup = *&dup;
667 EXPECT_EQ(dup, original);
668 }
669 }
670
671 TEST(IntVec, MoveConstructorAndAssignment) {
672 for (size_t len = 0; len < 20; len++) {
673 IntVec v_in;
674 const size_t inlined_capacity = v_in.capacity();
675 Fill(&v_in, len);
676 EXPECT_EQ(len, v_in.size());
677 EXPECT_LE(len, v_in.capacity());
678
679 {
680 IntVec v_temp(v_in);
681 auto* old_data = v_temp.data();
682 IntVec v_out(std::move(v_temp));
683 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
684 if (v_in.size() > inlined_capacity) {
685 // Allocation is moved as a whole, data stays in place.
686 EXPECT_TRUE(v_out.data() == old_data);
687 } else {
688 EXPECT_FALSE(v_out.data() == old_data);
689 }
690 }
691 for (size_t start_len = 0; start_len < 20; start_len++) {
692 IntVec v_out;
693 Fill(&v_out, start_len, 99); // Add dummy elements that should go away
694 IntVec v_temp(v_in);
695 auto* old_data = v_temp.data();
696 v_out = std::move(v_temp);
697 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
698 if (v_in.size() > inlined_capacity) {
699 // Allocation is moved as a whole, data stays in place.
700 EXPECT_TRUE(v_out.data() == old_data);
701 } else {
702 EXPECT_FALSE(v_out.data() == old_data);
703 }
704 }
705 }
706 }
707
708 class NotTriviallyDestructible {
709 public:
710 NotTriviallyDestructible() : p_(new int(1)) {}
711 explicit NotTriviallyDestructible(int i) : p_(new int(i)) {}
712
713 NotTriviallyDestructible(const NotTriviallyDestructible& other)
714 : p_(new int(*other.p_)) {}
715
716 NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) {
717 p_ = absl::make_unique<int>(*other.p_);
718 return *this;
719 }
720
721 bool operator==(const NotTriviallyDestructible& other) const {
722 return *p_ == *other.p_;
723 }
724
725 private:
726 std::unique_ptr<int> p_;
727 };
728
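// The aliasing tests pass references to a vector's own elements into
// emplace()/insert(), which may reallocate and invalidate those references;
// the heap-backed element type helps surface any use-after-free under
// sanitizers.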
729 TEST(AliasingTest, Emplace) {
730 for (size_t i = 2; i < 20; ++i) {
731 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
732 for (size_t j = 0; j < i; ++j) {
733 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
734 }
735 vec.emplace(vec.begin(), vec[0]);
736 EXPECT_EQ(vec[0], vec[1]);
737 vec.emplace(vec.begin() + i / 2, vec[i / 2]);
738 EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]);
739 vec.emplace(vec.end() - 1, vec.back());
740 EXPECT_EQ(vec[vec.size() - 2], vec.back());
741 }
742 }
743
744 TEST(AliasingTest, InsertWithCount) {
745 for (size_t i = 1; i < 20; ++i) {
746 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
747 for (size_t j = 0; j < i; ++j) {
748 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
749 }
750 for (size_t n = 0; n < 5; ++n) {
751 // We use back() where we can because it's guaranteed to become invalidated.
752 vec.insert(vec.begin(), n, vec.back());
753 auto b = vec.begin();
754 EXPECT_TRUE(
755 std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) {
756 return x == vec.back();
757 }));
758
759 auto m_idx = vec.size() / 2;
760 vec.insert(vec.begin() + m_idx, n, vec.back());
761 auto m = vec.begin() + m_idx;
762 EXPECT_TRUE(
763 std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) {
764 return x == vec.back();
765 }));
766
767 // We want distinct values so the equality test is meaningful;
768 // vec[vec.size() - 1] is also almost always invalidated.
769 auto old_e = vec.size() - 1;
770 auto val = vec[old_e];
771 vec.insert(vec.end(), n, vec[old_e]);
772 auto e = vec.begin() + old_e;
773 EXPECT_TRUE(std::all_of(
774 e, e + n,
775 [&val](const NotTriviallyDestructible& x) { return x == val; }));
776 }
777 }
778 }
779
780 TEST(OverheadTest, Storage) {
781 // Check for size overhead.
782 // In particular, ensure that std::allocator doesn't cost anything to store.
783 // The union should be absorbing some of the allocation bookkeeping overhead
784 // in the larger vectors, leaving only the size_ field as overhead.
785
786 struct T { void* val; };
787 size_t expected_overhead = sizeof(T);
788
789 EXPECT_EQ((2 * expected_overhead),
790 sizeof(absl::InlinedVector<T, 1>) - sizeof(T[1]));
791 EXPECT_EQ(expected_overhead,
792 sizeof(absl::InlinedVector<T, 2>) - sizeof(T[2]));
793 EXPECT_EQ(expected_overhead,
794 sizeof(absl::InlinedVector<T, 3>) - sizeof(T[3]));
795 EXPECT_EQ(expected_overhead,
796 sizeof(absl::InlinedVector<T, 4>) - sizeof(T[4]));
797 EXPECT_EQ(expected_overhead,
798 sizeof(absl::InlinedVector<T, 5>) - sizeof(T[5]));
799 EXPECT_EQ(expected_overhead,
800 sizeof(absl::InlinedVector<T, 6>) - sizeof(T[6]));
801 EXPECT_EQ(expected_overhead,
802 sizeof(absl::InlinedVector<T, 7>) - sizeof(T[7]));
803 EXPECT_EQ(expected_overhead,
804 sizeof(absl::InlinedVector<T, 8>) - sizeof(T[8]));
805 }
806
807 TEST(IntVec, Clear) {
808 for (size_t len = 0; len < 20; len++) {
809 SCOPED_TRACE(len);
810 IntVec v;
811 Fill(&v, len);
812 v.clear();
813 EXPECT_EQ(0u, v.size());
814 EXPECT_EQ(v.begin(), v.end());
815 }
816 }
817
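// reserve() must not reallocate when the requested capacity does not exceed
// the current size, and filling up to a reserved capacity must not move the
// underlying storage.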
818 TEST(IntVec, Reserve) {
819 for (size_t len = 0; len < 20; len++) {
820 IntVec v;
821 Fill(&v, len);
822
823 for (size_t newlen = 0; newlen < 100; newlen++) {
824 const int* start_rep = v.data();
825 v.reserve(newlen);
826 const int* final_rep = v.data();
827 if (newlen <= len) {
828 EXPECT_EQ(start_rep, final_rep);
829 }
830 EXPECT_LE(newlen, v.capacity());
831
832 // Filling up to newlen should not change rep
833 while (v.size() < newlen) {
834 v.push_back(0);
835 }
836 EXPECT_EQ(final_rep, v.data());
837 }
838 }
839 }
840
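// push_back(v.back()) must remain correct even though the argument aliases an
// element that reallocation would invalidate.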
841 TEST(StringVec, SelfRefPushBack) {
842 std::vector<std::string> std_v;
843 absl::InlinedVector<std::string, 4> v;
844 const std::string s = "A quite long string to ensure heap.";
845 std_v.push_back(s);
846 v.push_back(s);
847 for (int i = 0; i < 20; ++i) {
848 EXPECT_THAT(v, ElementsAreArray(std_v));
849
850 v.push_back(v.back());
851 std_v.push_back(std_v.back());
852 }
853 EXPECT_THAT(v, ElementsAreArray(std_v));
854 }
855
856 TEST(StringVec, SelfRefPushBackWithMove) {
857 std::vector<std::string> std_v;
858 absl::InlinedVector<std::string, 4> v;
859 const std::string s = "A quite long string to ensure heap.";
860 std_v.push_back(s);
861 v.push_back(s);
862 for (int i = 0; i < 20; ++i) {
863 EXPECT_EQ(v.back(), std_v.back());
864
865 v.push_back(std::move(v.back()));
866 std_v.push_back(std::move(std_v.back()));
867 }
868 EXPECT_EQ(v.back(), std_v.back());
869 }
870
871 TEST(StringVec, SelfMove) {
872 const std::string s = "A quite long string to ensure heap.";
873 for (int len = 0; len < 20; len++) {
874 SCOPED_TRACE(len);
875 absl::InlinedVector<std::string, 8> v;
876 for (int i = 0; i < len; ++i) {
877 SCOPED_TRACE(i);
878 v.push_back(s);
879 }
880 // Indirection necessary to avoid compiler warning.
881 v = std::move(*(&v));
882 // Ensure that the inlined vector is still in a valid state by copying it.
883 // We don't expect specific contents since a self-move results in an
884 // unspecified valid state.
885 std::vector<std::string> copy(v.begin(), v.end());
886 }
887 }
888
889 TEST(IntVec, Swap) {
890 for (size_t l1 = 0; l1 < 20; l1++) {
891 SCOPED_TRACE(l1);
892 for (size_t l2 = 0; l2 < 20; l2++) {
893 SCOPED_TRACE(l2);
894 IntVec a = Fill(l1, 0);
895 IntVec b = Fill(l2, 100);
896 {
897 using std::swap;
898 swap(a, b);
899 }
900 EXPECT_EQ(l1, b.size());
901 EXPECT_EQ(l2, a.size());
902 for (size_t i = 0; i < l1; i++) {
903 SCOPED_TRACE(i);
904 EXPECT_EQ(static_cast<int>(i), b[i]);
905 }
906 for (size_t i = 0; i < l2; i++) {
907 SCOPED_TRACE(i);
908 EXPECT_EQ(100 + static_cast<int>(i), a[i]);
909 }
910 }
911 }
912 }
913
914 TYPED_TEST_P(InstanceTest, Swap) {
915 using Instance = TypeParam;
916 using InstanceVec = absl::InlinedVector<Instance, 8>;
917 for (size_t l1 = 0; l1 < 20; l1++) {
918 SCOPED_TRACE(l1);
919 for (size_t l2 = 0; l2 < 20; l2++) {
920 SCOPED_TRACE(l2);
921 InstanceTracker tracker;
922 InstanceVec a, b;
923 const size_t inlined_capacity = a.capacity();
924 auto min_len = std::min(l1, l2);
925 auto max_len = std::max(l1, l2);
926 for (size_t i = 0; i < l1; i++)
927 a.push_back(Instance(static_cast<int>(i)));
928 for (size_t i = 0; i < l2; i++)
929 b.push_back(Instance(100 + static_cast<int>(i)));
930 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
931 tracker.ResetCopiesMovesSwaps();
932 {
933 using std::swap;
934 swap(a, b);
935 }
936 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
937 if (a.size() > inlined_capacity && b.size() > inlined_capacity) {
938 EXPECT_EQ(tracker.swaps(), 0); // Allocations are swapped.
939 EXPECT_EQ(tracker.moves(), 0);
940 } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) {
941 EXPECT_EQ(tracker.swaps(), static_cast<int>(min_len));
942 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
943 static_cast<int>(max_len - min_len));
944 } else {
945 // One is allocated and the other isn't. The allocation is transferred
946 // without copying elements, and the inlined instances are copied/moved.
947 EXPECT_EQ(tracker.swaps(), 0);
948 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
949 static_cast<int>(min_len));
950 }
951
952 EXPECT_EQ(l1, b.size());
953 EXPECT_EQ(l2, a.size());
954 for (size_t i = 0; i < l1; i++) {
955 EXPECT_EQ(static_cast<int>(i), b[i].value());
956 }
957 for (size_t i = 0; i < l2; i++) {
958 EXPECT_EQ(100 + static_cast<int>(i), a[i].value());
959 }
960 }
961 }
962 }
963
964 TEST(IntVec, EqualAndNotEqual) {
965 IntVec a, b;
966 EXPECT_TRUE(a == b);
967 EXPECT_FALSE(a != b);
968
969 a.push_back(3);
970 EXPECT_FALSE(a == b);
971 EXPECT_TRUE(a != b);
972
973 b.push_back(3);
974 EXPECT_TRUE(a == b);
975 EXPECT_FALSE(a != b);
976
977 b.push_back(7);
978 EXPECT_FALSE(a == b);
979 EXPECT_TRUE(a != b);
980
981 a.push_back(6);
982 EXPECT_FALSE(a == b);
983 EXPECT_TRUE(a != b);
984
985 a.clear();
986 b.clear();
987 for (size_t i = 0; i < 100; i++) {
988 a.push_back(static_cast<int>(i));
989 b.push_back(static_cast<int>(i));
990 EXPECT_TRUE(a == b);
991 EXPECT_FALSE(a != b);
992
993 b[i] = b[i] + 1;
994 EXPECT_FALSE(a == b);
995 EXPECT_TRUE(a != b);
996
997 b[i] = b[i] - 1; // Back to before
998 EXPECT_TRUE(a == b);
999 EXPECT_FALSE(a != b);
1000 }
1001 }
1002
1003 TEST(IntVec, RelationalOps) {
1004 IntVec a, b;
1005 EXPECT_FALSE(a < b);
1006 EXPECT_FALSE(b < a);
1007 EXPECT_FALSE(a > b);
1008 EXPECT_FALSE(b > a);
1009 EXPECT_TRUE(a <= b);
1010 EXPECT_TRUE(b <= a);
1011 EXPECT_TRUE(a >= b);
1012 EXPECT_TRUE(b >= a);
1013 b.push_back(3);
1014 EXPECT_TRUE(a < b);
1015 EXPECT_FALSE(b < a);
1016 EXPECT_FALSE(a > b);
1017 EXPECT_TRUE(b > a);
1018 EXPECT_TRUE(a <= b);
1019 EXPECT_FALSE(b <= a);
1020 EXPECT_FALSE(a >= b);
1021 EXPECT_TRUE(b >= a);
1022 }
1023
1024 TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) {
1025 using Instance = TypeParam;
1026 using InstanceVec = absl::InlinedVector<Instance, 8>;
1027 InstanceTracker tracker;
1028 for (size_t len = 0; len < 20; len++) {
1029 SCOPED_TRACE(len);
1030 tracker.ResetCopiesMovesSwaps();
1031
1032 InstanceVec v;
1033 const size_t inlined_capacity = v.capacity();
1034 for (size_t i = 0; i < len; i++) {
1035 v.push_back(Instance(static_cast<int>(i)));
1036 }
1037 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1038 EXPECT_GE(tracker.copies() + tracker.moves(),
1039 static_cast<int>(len)); // More due to reallocation.
1040 tracker.ResetCopiesMovesSwaps();
1041
1042 // Enlarging resize() must construct some objects
1043 tracker.ResetCopiesMovesSwaps();
1044 v.resize(len + 10, Instance(100));
1045 EXPECT_EQ(tracker.instances(), static_cast<int>(len) + 10);
1046 if (len <= inlined_capacity && len + 10 > inlined_capacity) {
1047 EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + static_cast<int>(len));
1048 } else {
1049 // Only specify a minimum number of copies + moves. We don't want to
1050 // depend on the reallocation policy here.
1051 EXPECT_GE(tracker.copies() + tracker.moves(),
1052 10); // More due to reallocation.
1053 }
1054
1055 // Shrinking resize() must destroy some objects
1056 tracker.ResetCopiesMovesSwaps();
1057 v.resize(len, Instance(100));
1058 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1059 EXPECT_EQ(tracker.copies(), 0);
1060 EXPECT_EQ(tracker.moves(), 0);
1061
1062 // reserve() must not increase the number of initialized objects
1063 SCOPED_TRACE("reserve");
1064 v.reserve(len + 1000);
1065 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1066 EXPECT_EQ(tracker.copies() + tracker.moves(), static_cast<int>(len));
1067
1068 // pop_back() and erase() must destroy one object
1069 if (len > 0) {
1070 tracker.ResetCopiesMovesSwaps();
1071 v.pop_back();
1072 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 1);
1073 EXPECT_EQ(tracker.copies(), 0);
1074 EXPECT_EQ(tracker.moves(), 0);
1075
1076 if (!v.empty()) {
1077 tracker.ResetCopiesMovesSwaps();
1078 v.erase(v.begin());
1079 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 2);
1080 EXPECT_EQ(tracker.copies() + tracker.moves(),
1081 static_cast<int>(len) - 2);
1082 }
1083 }
1084
1085 tracker.ResetCopiesMovesSwaps();
1086 int instances_before_empty_erase = tracker.instances();
1087 v.erase(v.begin(), v.begin());
1088 EXPECT_EQ(tracker.instances(), instances_before_empty_erase);
1089 EXPECT_EQ(tracker.copies() + tracker.moves(), 0);
1090 }
1091 }
1092
1093 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) {
1094 using Instance = TypeParam;
1095 using InstanceVec = absl::InlinedVector<Instance, 8>;
1096 InstanceTracker tracker;
1097 for (int len = 0; len < 20; len++) {
1098 SCOPED_TRACE(len);
1099 tracker.ResetCopiesMovesSwaps();
1100
1101 InstanceVec v;
1102 for (int i = 0; i < len; i++) {
1103 v.push_back(Instance(i));
1104 }
1105 EXPECT_EQ(tracker.instances(), len);
1106 EXPECT_GE(tracker.copies() + tracker.moves(),
1107 len); // More due to reallocation.
1108 tracker.ResetCopiesMovesSwaps();
1109 { // Copy constructor should create 'len' more instances.
1110 InstanceVec v_copy(v);
1111 EXPECT_EQ(tracker.instances(), len + len);
1112 EXPECT_EQ(tracker.copies(), len);
1113 EXPECT_EQ(tracker.moves(), 0);
1114 }
1115 EXPECT_EQ(tracker.instances(), len);
1116 }
1117 }
1118
1119 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) {
1120 using Instance = TypeParam;
1121 using InstanceVec = absl::InlinedVector<Instance, 8>;
1122 InstanceTracker tracker;
1123 for (int len = 0; len < 20; len++) {
1124 SCOPED_TRACE(len);
1125 tracker.ResetCopiesMovesSwaps();
1126
1127 InstanceVec v;
1128 const size_t inlined_capacity = v.capacity();
1129 for (int i = 0; i < len; i++) {
1130 v.push_back(Instance(i));
1131 }
1132 EXPECT_EQ(tracker.instances(), len);
1133 EXPECT_GE(tracker.copies() + tracker.moves(),
1134 len); // More due to reallocation.
1135 tracker.ResetCopiesMovesSwaps();
1136 {
1137 InstanceVec v_copy(std::move(v));
1138 if (static_cast<size_t>(len) > inlined_capacity) {
1139 // Allocation is moved as a whole.
1140 EXPECT_EQ(tracker.instances(), len);
1141 EXPECT_EQ(tracker.live_instances(), len);
1142 // Tests an implementation detail; don't rely on this in your code.
1143 EXPECT_EQ(v.size(), 0u); // NOLINT misc-use-after-move
1144 EXPECT_EQ(tracker.copies(), 0);
1145 EXPECT_EQ(tracker.moves(), 0);
1146 } else {
1147 EXPECT_EQ(tracker.instances(), len + len);
1148 if (Instance::supports_move()) {
1149 EXPECT_EQ(tracker.live_instances(), len);
1150 EXPECT_EQ(tracker.copies(), 0);
1151 EXPECT_EQ(tracker.moves(), len);
1152 } else {
1153 EXPECT_EQ(tracker.live_instances(), len + len);
1154 EXPECT_EQ(tracker.copies(), len);
1155 EXPECT_EQ(tracker.moves(), 0);
1156 }
1157 }
1158 EXPECT_EQ(tracker.swaps(), 0);
1159 }
1160 }
1161 }
1162
1163 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) {
1164 using Instance = TypeParam;
1165 using InstanceVec = absl::InlinedVector<Instance, 8>;
1166 InstanceTracker tracker;
1167 for (int len = 0; len < 20; len++) {
1168 SCOPED_TRACE(len);
1169 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1170 SCOPED_TRACE(longorshort);
1171 tracker.ResetCopiesMovesSwaps();
1172
1173 InstanceVec longer, shorter;
1174 for (int i = 0; i < len; i++) {
1175 longer.push_back(Instance(i));
1176 shorter.push_back(Instance(i));
1177 }
1178 longer.push_back(Instance(len));
1179 EXPECT_EQ(tracker.instances(), len + len + 1);
1180 EXPECT_GE(tracker.copies() + tracker.moves(),
1181 len + len + 1); // More due to reallocation.
1182
1183 tracker.ResetCopiesMovesSwaps();
1184 if (longorshort) {
1185 shorter = longer;
1186 EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1));
1187 EXPECT_GE(tracker.copies() + tracker.moves(),
1188 len + 1); // More due to reallocation.
1189 } else {
1190 longer = shorter;
1191 EXPECT_EQ(tracker.instances(), len + len);
1192 EXPECT_EQ(tracker.copies() + tracker.moves(), len);
1193 }
1194 }
1195 }
1196 }
1197
1198 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) {
1199 using Instance = TypeParam;
1200 using InstanceVec = absl::InlinedVector<Instance, 8>;
1201 InstanceTracker tracker;
1202 for (int len = 0; len < 20; len++) {
1203 SCOPED_TRACE(len);
1204 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1205 SCOPED_TRACE(longorshort);
1206 tracker.ResetCopiesMovesSwaps();
1207
1208 InstanceVec longer, shorter;
1209 const size_t inlined_capacity = longer.capacity();
1210 for (int i = 0; i < len; i++) {
1211 longer.push_back(Instance(i));
1212 shorter.push_back(Instance(i));
1213 }
1214 longer.push_back(Instance(len));
1215 EXPECT_EQ(tracker.instances(), len + len + 1);
1216 EXPECT_GE(tracker.copies() + tracker.moves(),
1217 len + len + 1); // More due to reallocation.
1218
1219 tracker.ResetCopiesMovesSwaps();
1220 int src_len;
1221 if (longorshort) {
1222 src_len = len + 1;
1223 shorter = std::move(longer);
1224 } else {
1225 src_len = len;
1226 longer = std::move(shorter);
1227 }
1228 if (static_cast<size_t>(src_len) > inlined_capacity) {
1229 // Allocation moved as a whole.
1230 EXPECT_EQ(tracker.instances(), src_len);
1231 EXPECT_EQ(tracker.live_instances(), src_len);
1232 EXPECT_EQ(tracker.copies(), 0);
1233 EXPECT_EQ(tracker.moves(), 0);
1234 } else {
1235 // Elements are all copied.
1236 EXPECT_EQ(tracker.instances(), src_len + src_len);
1237 if (Instance::supports_move()) {
1238 EXPECT_EQ(tracker.copies(), 0);
1239 EXPECT_EQ(tracker.moves(), src_len);
1240 EXPECT_EQ(tracker.live_instances(), src_len);
1241 } else {
1242 EXPECT_EQ(tracker.copies(), src_len);
1243 EXPECT_EQ(tracker.moves(), 0);
1244 EXPECT_EQ(tracker.live_instances(), src_len + src_len);
1245 }
1246 }
1247 EXPECT_EQ(tracker.swaps(), 0);
1248 }
1249 }
1250 }
1251
1252 TEST(CountElemAssign, SimpleTypeWithInlineBacking) {
1253 const size_t inlined_capacity = absl::InlinedVector<int, 2>().capacity();
1254
1255 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1256 SCOPED_TRACE(original_size);
1257 // Original contents are [12345, 12345, ...]
1258 std::vector<int> original_contents(original_size, 12345);
1259
1260 absl::InlinedVector<int, 2> v(original_contents.begin(),
1261 original_contents.end());
1262 v.assign(2, 123);
1263 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(123, 123)));
1264 if (original_size <= inlined_capacity) {
1265 // If the original had inline backing, it should stay inline.
1266 EXPECT_EQ(v.capacity(), inlined_capacity);
1267 }
1268 }
1269 }
1270
1271 TEST(CountElemAssign, SimpleTypeWithAllocation) {
1272 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1273 SCOPED_TRACE(original_size);
1274 // Original contents are [12345, 12345, ...]
1275 std::vector<int> original_contents(original_size, 12345);
1276
1277 absl::InlinedVector<int, 2> v(original_contents.begin(),
1278 original_contents.end());
1279 v.assign(3, 123);
1280 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(123, 123, 123)));
1281 EXPECT_LE(v.size(), v.capacity());
1282 }
1283 }
1284
1285 TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) {
1286 using Instance = TypeParam;
1287 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1288 SCOPED_TRACE(original_size);
1289 // Original contents are [12345, 12345, ...]
1290 std::vector<Instance> original_contents(original_size, Instance(12345));
1291
1292 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1293 original_contents.end());
1294 v.assign(2, Instance(123));
1295 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(ValueIs(123), ValueIs(123))));
1296 if (original_size <= 2) {
1297 // If the original had inline backing, it should stay inline.
1298 EXPECT_EQ(2u, v.capacity());
1299 }
1300 }
1301 }
1302
1303 template <typename Instance>
1304 void InstanceCountElemAssignWithAllocationTest() {
1305 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1306 SCOPED_TRACE(original_size);
1307 // Original contents are [12345, 12345, ...]
1308 std::vector<Instance> original_contents(original_size, Instance(12345));
1309
1310 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1311 original_contents.end());
1312 v.assign(3, Instance(123));
1313 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(ValueIs(123), ValueIs(123),
1314 ValueIs(123))));
1315 EXPECT_LE(v.size(), v.capacity());
1316 }
1317 }
1318 TEST(CountElemAssign, WithAllocationCopyableInstance) {
1319 InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>();
1320 }
1321 TEST(CountElemAssign, WithAllocationCopyableMovableInstance) {
1322 InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>();
1323 }
1324
1325 TEST(RangedConstructor, SimpleType) {
1326 std::vector<int> source_v = {4, 5, 6};
1327 // First try to fit in inline backing
1328 absl::InlinedVector<int, 4> v(source_v.begin(), source_v.end());
1329 EXPECT_EQ(3u, v.size());
1330 EXPECT_EQ(4u,
1331 v.capacity()); // Indication that we're still on inlined storage
1332 EXPECT_EQ(4, v[0]);
1333 EXPECT_EQ(5, v[1]);
1334 EXPECT_EQ(6, v[2]);
1335
1336 // Now, force a re-allocate
1337 absl::InlinedVector<int, 2> realloc_v(source_v.begin(), source_v.end());
1338 EXPECT_EQ(3u, realloc_v.size());
1339 EXPECT_LT(2u, realloc_v.capacity());
1340 EXPECT_EQ(4, realloc_v[0]);
1341 EXPECT_EQ(5, realloc_v[1]);
1342 EXPECT_EQ(6, realloc_v[2]);
1343 }
1344
1345 // Test for ranged constructors using Instance as the element type and
1346 // SourceContainer as the source container type.
1347 template <typename Instance, typename SourceContainer, int inlined_capacity>
1348 void InstanceRangedConstructorTestForContainer() {
1349 InstanceTracker tracker;
1350 SourceContainer source_v = {Instance(0), Instance(1)};
1351 tracker.ResetCopiesMovesSwaps();
1352 absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(),
1353 source_v.end());
1354 EXPECT_EQ(2u, v.size());
1355 EXPECT_LT(1u, v.capacity());
1356 EXPECT_EQ(0, v[0].value());
1357 EXPECT_EQ(1, v[1].value());
1358 EXPECT_EQ(tracker.copies(), 2);
1359 EXPECT_EQ(tracker.moves(), 0);
1360 }
1361
1362 template <typename Instance, int inlined_capacity>
1363 void InstanceRangedConstructorTestWithCapacity() {
1364 // Test with const and non-const, random access and non-random-access sources.
1365 // TODO(bsamwel): Test with an input iterator source.
1366 {
1367 SCOPED_TRACE("std::list");
1368 InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>,
1369 inlined_capacity>();
1370 {
1371 SCOPED_TRACE("const std::list");
1372 InstanceRangedConstructorTestForContainer<
1373 Instance, const std::list<Instance>, inlined_capacity>();
1374 }
1375 {
1376 SCOPED_TRACE("std::vector");
1377 InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>,
1378 inlined_capacity>();
1379 }
1380 {
1381 SCOPED_TRACE("const std::vector");
1382 InstanceRangedConstructorTestForContainer<
1383 Instance, const std::vector<Instance>, inlined_capacity>();
1384 }
1385 }
1386 }
1387
1388 TYPED_TEST_P(InstanceTest, RangedConstructor) {
1389 using Instance = TypeParam;
1390 SCOPED_TRACE("capacity=1");
1391 InstanceRangedConstructorTestWithCapacity<Instance, 1>();
1392 SCOPED_TRACE("capacity=2");
1393 InstanceRangedConstructorTestWithCapacity<Instance, 2>();
1394 }
1395
1396 TEST(RangedConstructor, ElementsAreConstructed) {
1397 std::vector<std::string> source_v = {"cat", "dog"};
1398
1399 // Force expansion and re-allocation of v. Ensures that when the vector is
1400 // expanded, new elements are constructed.
1401 absl::InlinedVector<std::string, 1> v(source_v.begin(), source_v.end());
1402 EXPECT_EQ("cat", v[0]);
1403 EXPECT_EQ("dog", v[1]);
1404 }
1405
1406 TEST(RangedAssign, SimpleType) {
1407 const size_t inlined_capacity = absl::InlinedVector<int, 3>().capacity();
1408
1409 // Test for all combinations of original sizes (empty and non-empty inline,
1410 // and out of line) and target sizes.
1411 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1412 SCOPED_TRACE(original_size);
1413 // Original contents are [12345, 12345, ...]
1414 std::vector<int> original_contents(original_size, 12345);
1415
1416 for (size_t target_size = 0; target_size <= 5; ++target_size) {
1417 SCOPED_TRACE(target_size);
1418
1419 // New contents are [3, 4, ...]
1420 std::vector<int> new_contents;
1421 for (size_t i = 0; i < target_size; ++i) {
1422 new_contents.push_back(static_cast<int>(i + 3));
1423 }
1424
1425 absl::InlinedVector<int, 3> v(original_contents.begin(),
1426 original_contents.end());
1427 v.assign(new_contents.begin(), new_contents.end());
1428
1429 EXPECT_EQ(new_contents.size(), v.size());
1430 EXPECT_LE(new_contents.size(), v.capacity());
1431 if (target_size <= inlined_capacity &&
1432 original_size <= inlined_capacity) {
1433 // Storage should stay inline when target size is small.
1434 EXPECT_EQ(v.capacity(), inlined_capacity);
1435 }
1436 EXPECT_THAT(v, ElementsAreArray(new_contents));
1437 }
1438 }
1439 }
1440
1441 // Returns true if lhs and rhs have the same value.
1442 template <typename Instance>
1443 static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) {
1444 return lhs.value() == rhs.value();
1445 }
1446
1447 // Test for ranged assign() using Instance as the element type and
1448 // SourceContainer as the source container type.
1449 template <typename Instance, typename SourceContainer>
1450 void InstanceRangedAssignTestForContainer() {
1451 // Test for all combinations of original sizes (empty and non-empty inline,
1452 // and out of line) and target sizes.
1453 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1454 SCOPED_TRACE(original_size);
1455 // Original contents are [12345, 12345, ...]
1456 std::vector<Instance> original_contents(original_size, Instance(12345));
1457
1458 for (size_t target_size = 0; target_size <= 5; ++target_size) {
1459 SCOPED_TRACE(target_size);
1460
1461 // New contents are [3, 4, ...]
1462 // Generate data using a non-const container, because SourceContainer
1463 // itself may be const.
1464 // TODO(bsamwel): Test with an input iterator.
1465 std::vector<Instance> new_contents_in;
1466 for (size_t i = 0; i < target_size; ++i) {
1467 new_contents_in.push_back(Instance(static_cast<int>(i) + 3));
1468 }
1469 SourceContainer new_contents(new_contents_in.begin(),
1470 new_contents_in.end());
1471
1472 absl::InlinedVector<Instance, 3> v(original_contents.begin(),
1473 original_contents.end());
1474 v.assign(new_contents.begin(), new_contents.end());
1475
1476 EXPECT_EQ(new_contents.size(), v.size());
1477 EXPECT_LE(new_contents.size(), v.capacity());
1478 if (target_size <= 3 && original_size <= 3) {
1479 // Storage should stay inline when target size is small.
1480 EXPECT_EQ(3u, v.capacity());
1481 }
1482 EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(),
1483 InstanceValuesEqual<Instance>));
1484 }
1485 }
1486 }
1487
1488 TYPED_TEST_P(InstanceTest, RangedAssign) {
1489 using Instance = TypeParam;
1490 // Test with const and non-const, random access and non-random-access sources.
1491 // TODO(bsamwel): Test with an input iterator source.
1492 SCOPED_TRACE("std::list");
1493 InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>();
1494 SCOPED_TRACE("const std::list");
1495 InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>();
1496 SCOPED_TRACE("std::vector");
1497 InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>();
1498 SCOPED_TRACE("const std::vector");
1499 InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>();
1500 }
1501
1502 TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) {
1503 EXPECT_THAT((absl::InlinedVector<int, 4>{4, 5, 6}),
1504 AllOf(SizeIs(3u), CapacityIs(4u), ElementsAre(4, 5, 6)));
1505 }
1506
1507 TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) {
1508 EXPECT_THAT((absl::InlinedVector<int, 2>{4, 5, 6}),
1509 AllOf(SizeIs(3u), CapacityIs(Gt(2u)), ElementsAre(4, 5, 6)));
1510 }
1511
1512 TEST(InitializerListConstructor, DisparateTypesInList) {
1513 EXPECT_THAT((absl::InlinedVector<int, 2>{-7, 8ULL}), ElementsAre(-7, 8));
1514
1515 EXPECT_THAT((absl::InlinedVector<std::string, 2>{"foo", std::string("bar")}),
1516 ElementsAre("foo", "bar"));
1517 }
1518
1519 TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) {
1520 const size_t inlined_capacity =
1521 absl::InlinedVector<CopyableMovableInstance, 1>().capacity();
1522 EXPECT_THAT(
1523 (absl::InlinedVector<CopyableMovableInstance, 1>{
1524 CopyableMovableInstance(0)}),
1525 AllOf(SizeIs(1u), CapacityIs(inlined_capacity), ElementsAre(ValueIs(0))));
1526 }
1527
1528 TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) {
1529 EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{
1530 CopyableMovableInstance(0), CopyableMovableInstance(1)}),
1531 AllOf(SizeIs(2u), CapacityIs(Gt(1u)),
1532 ElementsAre(ValueIs(0), ValueIs(1))));
1533 }
1534
1535 TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) {
1536 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1537 SCOPED_TRACE(original_size);
1538
1539 absl::InlinedVector<int, 2> v1(original_size, 12345);
1540 const size_t original_capacity_v1 = v1.capacity();
1541 v1.assign({3});
1542 EXPECT_THAT(v1, AllOf(SizeIs(1u), CapacityIs(original_capacity_v1),
1543 ElementsAre(3)));
1544
1545 absl::InlinedVector<int, 2> v2(original_size, 12345);
1546 const size_t original_capacity_v2 = v2.capacity();
1547 v2 = {3};
1548 EXPECT_THAT(v2, AllOf(SizeIs(1u), CapacityIs(original_capacity_v2),
1549 ElementsAre(3)));
1550 }
1551 }
1552
1553 TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) {
1554 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1555 SCOPED_TRACE(original_size);
1556 absl::InlinedVector<int, 2> v1(original_size, 12345);
1557 v1.assign({3, 4, 5});
1558 EXPECT_THAT(v1, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1559 EXPECT_LE(3u, v1.capacity());
1560
1561 absl::InlinedVector<int, 2> v2(original_size, 12345);
1562 v2 = {3, 4, 5};
1563 EXPECT_THAT(v2, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1564 EXPECT_LE(3u, v2.capacity());
1565 }
1566 }
1567
1568 TEST(InitializerListAssign, DisparateTypesInList) {
1569 absl::InlinedVector<int, 2> v_int1;
1570 v_int1.assign({-7, 8ULL});
1571 EXPECT_THAT(v_int1, ElementsAre(-7, 8));
1572
1573 absl::InlinedVector<int, 2> v_int2;
1574 v_int2 = {-7, 8ULL};
1575 EXPECT_THAT(v_int2, ElementsAre(-7, 8));
1576
1577 absl::InlinedVector<std::string, 2> v_string1;
1578 v_string1.assign({"foo", std::string("bar")});
1579 EXPECT_THAT(v_string1, ElementsAre("foo", "bar"));
1580
1581 absl::InlinedVector<std::string, 2> v_string2;
1582 v_string2 = {"foo", std::string("bar")};
1583 EXPECT_THAT(v_string2, ElementsAre("foo", "bar"));
1584 }
1585
1586 TYPED_TEST_P(InstanceTest, InitializerListAssign) {
1587 using Instance = TypeParam;
1588 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1589 SCOPED_TRACE(original_size);
1590 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1591 const size_t original_capacity = v.capacity();
1592 v.assign({Instance(3)});
1593 EXPECT_THAT(v, AllOf(SizeIs(1u), CapacityIs(original_capacity),
1594 ElementsAre(ValueIs(3))));
1595 }
1596 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1597 SCOPED_TRACE(original_size);
1598 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1599 v.assign({Instance(3), Instance(4), Instance(5)});
1600 EXPECT_THAT(
1601 v, AllOf(SizeIs(3u), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
1602 EXPECT_LE(3u, v.capacity());
1603 }
1604 }
1605
1606 REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
1607 CountConstructorsDestructorsOnCopyConstruction,
1608 CountConstructorsDestructorsOnMoveConstruction,
1609 CountConstructorsDestructorsOnAssignment,
1610 CountConstructorsDestructorsOnMoveAssignment,
1611 CountElemAssignInlineBacking, RangedConstructor,
1612 RangedAssign, InitializerListAssign);
1613
1614 using InstanceTypes =
1615 ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
1616 INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
1617 InstanceTypes);
1618
1619 TEST(DynamicVec, DynamicVecCompiles) {
1620 DynamicVec v;
1621 (void)v;
1622 }
1623
1624 TEST(DynamicVec, CreateNonEmptyDynamicVec) {
1625 DynamicVec v(1);
1626 EXPECT_EQ(v.size(), 1u);
1627 }
1628
1629 TEST(DynamicVec, EmplaceBack) {
1630 DynamicVec v;
1631 v.emplace_back(Dynamic{});
1632 EXPECT_EQ(v.size(), 1u);
1633 }
1634
1635 TEST(DynamicVec, EmplaceBackAfterHeapAllocation) {
1636 DynamicVec v;
1637 v.reserve(10);
1638 v.emplace_back(Dynamic{});
1639 EXPECT_EQ(v.size(), 1u);
1640 }
1641
1642 TEST(DynamicVec, EmptyIteratorComparison) {
1643 DynamicVec v;
1644 EXPECT_EQ(v.begin(), v.end());
1645 EXPECT_EQ(v.cbegin(), v.cend());
1646 }
1647
1648 TEST(AllocatorSupportTest, Constructors) {
1649 using MyAlloc = CountingAllocator<int>;
1650 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1651 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1652 int64_t allocated = 0;
1653 MyAlloc alloc(&allocated);
1654 { AllocVec ABSL_ATTRIBUTE_UNUSED v; }
1655 { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); }
1656 { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); }
1657 { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); }
1658
1659 AllocVec v2;
1660 { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); }
1661 { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); }
1662 }
1663
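// CountingAllocator reports live allocated bytes through the `allocated`
// counter: nothing should be charged while the vector fits in its inlined
// capacity of four ints, and shrink_to_fit() should return memory it no
// longer needs.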
1664 TEST(AllocatorSupportTest, CountAllocations) {
1665 using MyAlloc = CountingAllocator<int>;
1666 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1667 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1668 int64_t allocated = 0;
1669 MyAlloc alloc(&allocated);
1670 {
1671 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc);
1672 EXPECT_THAT(allocated, Eq(0));
1673 }
1674 EXPECT_THAT(allocated, Eq(0));
1675 {
1676 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc);
1677 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
1678 }
1679 EXPECT_THAT(allocated, Eq(0));
1680 {
1681 AllocVec v(4, 1, alloc);
1682 EXPECT_THAT(allocated, Eq(0));
1683
1684 int64_t allocated2 = 0;
1685 MyAlloc alloc2(&allocated2);
1686 AllocVec v2(v, alloc2);
1687 EXPECT_THAT(allocated2, Eq(0));
1688
1689 int64_t allocated3 = 0;
1690 MyAlloc alloc3(&allocated3);
1691 AllocVec v3(std::move(v), alloc3);
1692 EXPECT_THAT(allocated3, Eq(0));
1693 }
1694 EXPECT_THAT(allocated, 0);
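  // Copying from out-of-line storage allocates through the destination's
  // allocator, and (as the expectations below show) the move constructor
  // taking a different allocator also allocates rather than stealing the
  // source's heap buffer.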
  {
    AllocVec v(8, 2, alloc);
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));

    int64_t allocated2 = 0;
    MyAlloc alloc2(&allocated2);
    AllocVec v2(v, alloc2);
    EXPECT_THAT(allocated2, Eq(static_cast<int64_t>(v2.size() * sizeof(int))));

    int64_t allocated3 = 0;
    MyAlloc alloc3(&allocated3);
    AllocVec v3(std::move(v), alloc3);
    EXPECT_THAT(allocated3, Eq(static_cast<int64_t>(v3.size() * sizeof(int))));
  }
  EXPECT_EQ(allocated, 0);
  {
    // Test shrink_to_fit deallocations.
    AllocVec v(8, 2, alloc);
    EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
    v.resize(5);
    EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
    v.shrink_to_fit();
    EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
    v.resize(4);
    EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
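    // Four elements fit in the inline storage again, so this shrink_to_fit is
    // expected to release the heap buffer entirely.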
    v.shrink_to_fit();
    EXPECT_EQ(allocated, 0);
  }
}

TEST(AllocatorSupportTest, SwapBothAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
    const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_LT(v1.capacity(), v2.capacity());
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
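    // The heap buffers are exchanged together with their allocators, so each
    // counter now tracks the buffer held by the other vector.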
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
  }
  EXPECT_THAT(allocated1, Eq(0));
  EXPECT_THAT(allocated2, Eq(0));
}

TEST(AllocatorSupportTest, SwapOneAllocated) {
  using MyAlloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
  int64_t allocated1 = 0;
  int64_t allocated2 = 0;
  {
    const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
    const int ia2[] = {0, 1, 2, 3};
    MyAlloc a1(&allocated1);
    MyAlloc a2(&allocated2);
    AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
    AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    v1.swap(v2);
    EXPECT_THAT(v1, ElementsAreArray(ia2));
    EXPECT_THAT(v2, ElementsAreArray(ia1));
    EXPECT_THAT(allocated1,
                Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
    EXPECT_THAT(allocated2, Eq(0));
    EXPECT_TRUE(v2.get_allocator() == a1);
    EXPECT_TRUE(v1.get_allocator() == a2);
  }
  EXPECT_THAT(allocated1, Eq(0));
  EXPECT_THAT(allocated2, Eq(0));
}

TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called only once so that the vector remains inlined.
  inlined_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;

  // MSVC's allocator preemptively allocates in debug mode.
#if !defined(_MSC_VER)
  EXPECT_EQ(absl_responsible_for_count, 0);
#endif  // !defined(_MSC_VER)

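  // The scoped_allocator_adaptor propagates the counting allocator to the
  // nested StdVector, so heap usage by the inner vector is also reflected in
  // total_allocated_byte_count.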
  inlined_case[0].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  inlined_case.clear();
  inlined_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called twice to force the vector into allocated (heap) storage.
  allocated_case.emplace_back();
  allocated_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;
  EXPECT_GT(absl_responsible_for_count, 0);

  allocated_case[1].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  allocated_case.clear();
  allocated_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

TEST(AllocatorSupportTest, SizeAllocConstructor) {
  constexpr size_t inlined_size = 4;
  using Alloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, inlined_size, Alloc>;

  {
    auto len = inlined_size / 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Inline storage used; the allocator should not be invoked.
    EXPECT_THAT(allocated, Eq(0));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }

  {
    auto len = inlined_size * 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Out-of-line storage used; an allocation of `len` (8) ints is expected.
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(len * sizeof(int))));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }
}

TEST(InlinedVectorTest, MinimumAllocatorCompilesUsingTraits) {
  using T = int;
  using A = std::allocator<T>;
  using ATraits = absl::allocator_traits<A>;

  struct MinimumAllocator {
    using value_type = T;

    value_type* allocate(size_t n) {
      A a;
      return ATraits::allocate(a, n);
    }

    void deallocate(value_type* p, size_t n) {
      A a;
      ATraits::deallocate(a, p, n);
    }
  };

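  // InlinedVector accesses the allocator through allocator_traits, which
  // fills in everything MinimumAllocator omits (construct, destroy, rebind,
  // size types), so this minimal interface is enough to compile and run.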
  absl::InlinedVector<T, 1, MinimumAllocator> vec;
  vec.emplace_back();
  vec.resize(0);
}

TEST(InlinedVectorTest, AbslHashValueWorks) {
  using V = absl::InlinedVector<int, 4>;
  std::vector<V> cases;

  // Generate a variety of vectors; some of these are small enough for the
  // inline space but are stored out of line.
  for (size_t i = 0; i < 10; ++i) {
    V v;
    for (int j = 0; j < static_cast<int>(i); ++j) {
      v.push_back(j);
    }
    cases.push_back(v);
    v.resize(i % 4);
    cases.push_back(v);
  }

  EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(cases));
}

class MoveConstructibleOnlyInstance
    : public absl::test_internal::BaseCountedInstance {
 public:
  explicit MoveConstructibleOnlyInstance(int x) : BaseCountedInstance(x) {}
  MoveConstructibleOnlyInstance(MoveConstructibleOnlyInstance&& other) =
      default;
  MoveConstructibleOnlyInstance& operator=(
      MoveConstructibleOnlyInstance&& other) = delete;
};

MATCHER(HasValue, "") {
  return ::testing::get<0>(arg).value() == ::testing::get<1>(arg);
}

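// The tests below verify that move assignment works for element types that
// are move-constructible but not move-assignable: heap storage is transferred
// wholesale where possible, and elements are move-constructed otherwise.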
TEST(NonAssignableMoveAssignmentTest, AllocatedToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  inlined = std::move(allocated);
  // Ownership of the allocated storage was transferred; no elements were
  // moved.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonAssignableMoveAssignmentTest, InlineToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  allocated = std::move(inlined);
  // The source is inline, so its element is moved individually.
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(allocated, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, InlineToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(1);
  tracker.ResetCopiesMovesSwaps();

  inlined_a = std::move(inlined_b);
  // Both vectors are inline, so the element is moved individually.
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, AllocatedToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a = std::move(allocated_b);
  // Ownership of the allocated storage was transferred; no elements were
  // moved.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
}

TEST(NonAssignableMoveAssignmentTest, AssignThis) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  // Obfuscated in order to pass -Wself-move.
  v = std::move(*std::addressof(v));
  // Self-move-assignment is a no-op: nothing is moved or destroyed.
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}

class NonSwappableInstance : public absl::test_internal::BaseCountedInstance {
 public:
  explicit NonSwappableInstance(int x) : BaseCountedInstance(x) {}
  NonSwappableInstance(const NonSwappableInstance& other) = default;
  NonSwappableInstance& operator=(const NonSwappableInstance& other) = default;
  NonSwappableInstance(NonSwappableInstance&& other) = default;
  NonSwappableInstance& operator=(NonSwappableInstance&& other) = default;
};

void swap(NonSwappableInstance&, NonSwappableInstance&) = delete;

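// The tests below verify that `InlinedVector::swap` does not require the
// element type to be swappable: it transfers heap storage where it can and
// otherwise falls back to move construction.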
TEST(NonSwappableSwapTest, InlineAndAllocatedTransferStorageAndMove) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

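  // The heap buffer moves to `inlined` wholesale; only the single element that
  // was stored inline needs to be move-constructed into `allocated`, hence the
  // single move counted below.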
  inlined.swap(allocated);
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonSwappableSwapTest, InlineAndInlineMoveIndividualElements) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(2);
  tracker.ResetCopiesMovesSwaps();

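  // Both vectors are inline and element-wise `swap` is deleted, so the
  // elements are exchanged by move construction; the three moves expected
  // below are consistent with routing one element through a temporary.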
  inlined_a.swap(inlined_b);
  EXPECT_EQ(tracker.moves(), 3);
  EXPECT_EQ(tracker.live_instances(), 2);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {2}));
  EXPECT_THAT(inlined_b, Pointwise(HasValue(), {1}));
}

TEST(NonSwappableSwapTest, AllocatedAndAllocatedOnlyTransferStorage) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a.swap(allocated_b);
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 7);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
  EXPECT_THAT(allocated_b, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonSwappableSwapTest, SwapThis) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  v.swap(v);
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}

template <size_t N>
using CharVec = absl::InlinedVector<char, N>;

// Warning: This struct "simulates" the type `InlinedVector::Storage::Allocated`
// to make reasonable expectations for inlined storage capacity optimization. If
// the implementation of `Allocated` changes, then `MySpan` and the tests that
// use it need to be updated accordingly.
template <typename T>
struct MySpan {
  T* data;
  size_t size;
};
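
// Illustrative note (not an assertion of the implementation): on a typical
// 64-bit platform, `MySpan<char>` is a pointer plus a `size_t` (16 bytes), so
// `CharVec<1>` below reports an inlined capacity of 16 and
// `absl::InlinedVector<int, 1>` reports 16 / sizeof(int) == 4 slots.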

TEST(StorageTest, InlinedCapacityAutoIncrease) {
  // The requested capacity is automatically increased to
  // `sizeof(MySpan<char>)`.
  EXPECT_GT(CharVec<1>().capacity(), 1);
  EXPECT_EQ(CharVec<1>().capacity(), sizeof(MySpan<char>));
  EXPECT_EQ(CharVec<1>().capacity(), CharVec<2>().capacity());
  EXPECT_EQ(sizeof(CharVec<1>), sizeof(CharVec<2>));

  // The requested capacity is automatically increased to
  // `sizeof(MySpan<int>) / sizeof(int)`.
  EXPECT_GT((absl::InlinedVector<int, 1>().capacity()), 1);
  EXPECT_EQ((absl::InlinedVector<int, 1>().capacity()),
            sizeof(MySpan<int>) / sizeof(int));
}

}  // anonymous namespace