1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc.  All rights reserved.
3 //
4 // Use of this source code is governed by a BSD-style
5 // license that can be found in the LICENSE file or at
6 // https://developers.google.com/open-source/licenses/bsd
7 
8 // Author: kenton@google.com (Kenton Varda)
9 //  Based on original Protocol Buffers design by
10 //  Sanjay Ghemawat, Jeff Dean, and others.
11 //
12 // TODO:  Improve this unittest to bring it up to the standards of
13 //   other proto2 unittests.
14 
15 #include "google/protobuf/repeated_field.h"
16 
17 #include <algorithm>
18 #include <cstddef>
19 #include <cstdint>
20 #include <cstdlib>
21 #include <cstring>
22 #include <iterator>
23 #include <limits>
24 #include <list>
25 #include <sstream>
26 #include <string>
27 #include <type_traits>
28 #include <utility>
29 #include <vector>
30 
31 #include <gmock/gmock.h>
32 #include <gtest/gtest.h>
33 #include "absl/numeric/bits.h"
34 #include "absl/strings/cord.h"
35 #include "absl/types/span.h"
36 #include "google/protobuf/arena_test_util.h"
37 #include "google/protobuf/internal_visibility_for_testing.h"
38 #include "google/protobuf/io/coded_stream.h"
39 #include "google/protobuf/io/zero_copy_stream_impl_lite.h"
40 #include "google/protobuf/parse_context.h"
41 // TODO: Remove.
42 #include "google/protobuf/repeated_ptr_field.h"
43 #include "google/protobuf/unittest.pb.h"
44 
45 
46 // Must be included last.
47 #include "google/protobuf/port_def.inc"
48 
49 namespace google {
50 namespace protobuf {
51 namespace {
52 
53 using ::protobuf_unittest::TestAllTypes;
54 using ::testing::A;
55 using ::testing::AllOf;
56 using ::testing::ElementsAre;
57 using ::testing::Ge;
58 using ::testing::Le;
59 
60 TEST(RepeatedFieldIterator, Traits) {
61   using It = RepeatedField<absl::Cord>::iterator;
62   EXPECT_TRUE((std::is_same<It::value_type, absl::Cord>::value));
63   EXPECT_TRUE((std::is_same<It::reference, absl::Cord&>::value));
64   EXPECT_TRUE((std::is_same<It::pointer, absl::Cord*>::value));
65   EXPECT_TRUE((std::is_same<It::difference_type, std::ptrdiff_t>::value));
66   EXPECT_TRUE((std::is_same<It::iterator_category,
67                             std::random_access_iterator_tag>::value));
68 #if __cplusplus >= 202002L
69   EXPECT_TRUE((
70       std::is_same<It::iterator_concept, std::contiguous_iterator_tag>::value));
71 #else
72   EXPECT_TRUE((std::is_same<It::iterator_concept,
73                             std::random_access_iterator_tag>::value));
74 #endif
75 }
76 
77 TEST(ConstRepeatedFieldIterator, Traits) {
78   using It = RepeatedField<absl::Cord>::const_iterator;
79   EXPECT_TRUE((std::is_same<It::value_type, absl::Cord>::value));
80   EXPECT_TRUE((std::is_same<It::reference, const absl::Cord&>::value));
81   EXPECT_TRUE((std::is_same<It::pointer, const absl::Cord*>::value));
82   EXPECT_TRUE((std::is_same<It::difference_type, std::ptrdiff_t>::value));
83   EXPECT_TRUE((std::is_same<It::iterator_category,
84                             std::random_access_iterator_tag>::value));
85 #if __cplusplus >= 202002L
86   EXPECT_TRUE((
87       std::is_same<It::iterator_concept, std::contiguous_iterator_tag>::value));
88 #else
89   EXPECT_TRUE((std::is_same<It::iterator_concept,
90                             std::random_access_iterator_tag>::value));
91 #endif
92 }
93 
94 TEST(RepeatedField, ConstInit) {
95   PROTOBUF_CONSTINIT static RepeatedField<int> field{};  // NOLINT
96   EXPECT_TRUE(field.empty());
97 }
98 
99 // Test operations on a small RepeatedField.
100 TEST(RepeatedField, Small) {
101   RepeatedField<int> field;
102 
103   EXPECT_TRUE(field.empty());
104   EXPECT_EQ(field.size(), 0);
105 
106   field.Add(5);
107 
108   EXPECT_FALSE(field.empty());
109   EXPECT_EQ(field.size(), 1);
110   EXPECT_EQ(field.Get(0), 5);
111   EXPECT_EQ(field.at(0), 5);
112 
113   field.Add(42);
114 
115   EXPECT_FALSE(field.empty());
116   EXPECT_EQ(field.size(), 2);
117   EXPECT_EQ(field.Get(0), 5);
118   EXPECT_EQ(field.at(0), 5);
119   EXPECT_EQ(field.Get(1), 42);
120   EXPECT_EQ(field.at(1), 42);
121 
122   field.Set(1, 23);
123 
124   EXPECT_FALSE(field.empty());
125   EXPECT_EQ(field.size(), 2);
126   EXPECT_EQ(field.Get(0), 5);
127   EXPECT_EQ(field.at(0), 5);
128   EXPECT_EQ(field.Get(1), 23);
129   EXPECT_EQ(field.at(1), 23);
130 
131   field.at(1) = 25;
132 
133   EXPECT_FALSE(field.empty());
134   EXPECT_EQ(field.size(), 2);
135   EXPECT_EQ(field.Get(0), 5);
136   EXPECT_EQ(field.at(0), 5);
137   EXPECT_EQ(field.Get(1), 25);
138   EXPECT_EQ(field.at(1), 25);
139 
140   field.RemoveLast();
141 
142   EXPECT_FALSE(field.empty());
143   EXPECT_EQ(field.size(), 1);
144   EXPECT_EQ(field.Get(0), 5);
145   EXPECT_EQ(field.at(0), 5);
146 
147   field.Clear();
148 
149   EXPECT_TRUE(field.empty());
150   EXPECT_EQ(field.size(), 0);
151   if (sizeof(void*) == 8) {
152     // Usage should be 0 because this should fit in SOO space.
153     EXPECT_EQ(field.SpaceUsedExcludingSelf(), 0);
154   }
155 }
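
// Illustrative sketch (editor addition, not part of the upstream suite): the
// "SOO space" referenced above is a small inline buffer. Adding no more than
// the initial Capacity() keeps the elements inline, so heap usage stays 0.
// Assumes a 64-bit build; see InitialSooCapacity near the end of this file.
TEST(RepeatedFieldExample, SooSpaceUsageSketch) {
  if (sizeof(void*) != 8) GTEST_SKIP() << "SOO capacities differ on 32-bit";
  RepeatedField<int> field;
  const int soo_capacity = field.Capacity();  // 2 for int on 64-bit
  for (int i = 0; i < soo_capacity; ++i) {
    field.Add(i);
  }
  // Still stored in the inline (SOO) buffer, so no separate heap allocation.
  EXPECT_EQ(field.SpaceUsedExcludingSelf(), 0);
}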
156 
157 
158 // Test operations on a RepeatedField which is large enough to allocate a
159 // separate array.
160 TEST(RepeatedField, Large) {
161   RepeatedField<int> field;
162 
163   for (int i = 0; i < 16; i++) {
164     field.Add(i * i);
165   }
166 
167   EXPECT_FALSE(field.empty());
168   EXPECT_EQ(field.size(), 16);
169 
170   for (int i = 0; i < 16; i++) {
171     EXPECT_EQ(field.Get(i), i * i);
172   }
173 
174   int expected_usage = 16 * sizeof(int);
175   EXPECT_GE(field.SpaceUsedExcludingSelf(), expected_usage);
176 }
177 
178 template <typename Rep>
179 void CheckAllocationSizes() {
180   using T = typename Rep::value_type;
181   // Use a large initial block to make the checks below easier to predict.
182   std::string buf(1 << 20, 0);
183 
184   Arena arena(&buf[0], buf.size());
185   auto* rep = Arena::Create<Rep>(&arena);
186   size_t prev = arena.SpaceUsed();
187 
188   for (int i = 0; i < 100; ++i) {
189     rep->Add(T{});
190     if (sizeof(void*) == 8) {
191       size_t new_used = arena.SpaceUsed();
192       size_t last_alloc = new_used - prev;
193       prev = new_used;
194 
195       // When we actually allocated something, check the size.
196       if (last_alloc != 0) {
197         // Must be `>= 16`, as expected by the Arena.
198         ASSERT_GE(last_alloc, 16);
199         // Must be of a power of two.
200         size_t log2 = absl::bit_width(last_alloc) - 1;
201         ASSERT_EQ((1 << log2), last_alloc);
202       }
203 
204       // The byte size must be a multiple of 8 when not SOO.
205       const int capacity_bytes = rep->Capacity() * sizeof(T);
206       if (capacity_bytes > internal::kSooCapacityBytes) {
207         ASSERT_EQ(capacity_bytes % 8, 0);
208       }
209     }
210   }
211 }
212 
213 TEST(RepeatedField, ArenaAllocationSizesMatchExpectedValues) {
214   // RepeatedField guarantees that in 64-bit mode we never allocate anything
215   // smaller than 16 bytes from an arena.
216   // This is important to avoid a branch in the reallocation path.
217   // This is also important because allocating anything less would be wasting
218   // memory.
219   // If the allocation size is wrong, ReturnArrayMemory will ABSL_DCHECK.
220   EXPECT_NO_FATAL_FAILURE(CheckAllocationSizes<RepeatedField<bool>>());
221   EXPECT_NO_FATAL_FAILURE(CheckAllocationSizes<RepeatedField<uint32_t>>());
222   EXPECT_NO_FATAL_FAILURE(CheckAllocationSizes<RepeatedField<uint64_t>>());
223   EXPECT_NO_FATAL_FAILURE(CheckAllocationSizes<RepeatedField<absl::Cord>>());
224 }
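
// Illustrative sketch (editor addition): the power-of-two check performed in
// CheckAllocationSizes() above, spelled out for one hypothetical allocation
// size. bit_width(64) is 7, so log2 is 6 and 1 << 6 == 64; a non-power-of-two
// value such as 48 (bit_width 6, 1 << 5 == 32) would fail the same check.
TEST(RepeatedFieldExample, PowerOfTwoAllocationCheckSketch) {
  constexpr size_t kHypotheticalAlloc = 64;
  EXPECT_GE(kHypotheticalAlloc, size_t{16});  // minimum arena allocation noted above
  const size_t log2 = absl::bit_width(kHypotheticalAlloc) - 1;
  EXPECT_EQ(size_t{1} << log2, kHypotheticalAlloc);
}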
225 
226 TEST(RepeatedField, NaturalGrowthOnArenasReuseBlocks) {
227   Arena arena;
228   std::vector<RepeatedField<int>*> values;
229 
230   static constexpr int kNumFields = 100;
231   static constexpr int kNumElems = 1000;
232   for (int i = 0; i < kNumFields; ++i) {
233     values.push_back(Arena::Create<RepeatedField<int>>(&arena));
234     auto& field = *values.back();
235     for (int j = 0; j < kNumElems; ++j) {
236       field.Add(j);
237     }
238   }
239 
240   size_t expected = values.size() * values[0]->Capacity() * sizeof(int);
241   // Use a 2% slack for other overhead. If we were not reusing the blocks, the
242   // actual value would be ~2x the expected.
243   EXPECT_THAT(arena.SpaceUsed(), AllOf(Ge(expected), Le(1.02 * expected)));
244 }
245 
246 // Test swapping between various types of RepeatedFields.
247 TEST(RepeatedField, SwapSmallSmall) {
248   RepeatedField<int> field1;
249   RepeatedField<int> field2;
250 
251   field1.Add(5);
252   field1.Add(42);
253 
254   EXPECT_FALSE(field1.empty());
255   EXPECT_EQ(field1.size(), 2);
256   EXPECT_EQ(field1.Get(0), 5);
257   EXPECT_EQ(field1.Get(1), 42);
258 
259   EXPECT_TRUE(field2.empty());
260   EXPECT_EQ(field2.size(), 0);
261 
262   field1.Swap(&field2);
263 
264   EXPECT_TRUE(field1.empty());
265   EXPECT_EQ(field1.size(), 0);
266 
267   EXPECT_FALSE(field2.empty());
268   EXPECT_EQ(field2.size(), 2);
269   EXPECT_EQ(field2.Get(0), 5);
270   EXPECT_EQ(field2.Get(1), 42);
271 }
272 
273 TEST(RepeatedField, SwapLargeSmall) {
274   RepeatedField<int> field1;
275   RepeatedField<int> field2;
276 
277   for (int i = 0; i < 16; i++) {
278     field1.Add(i * i);
279   }
280   field2.Add(5);
281   field2.Add(42);
282   field1.Swap(&field2);
283 
284   EXPECT_EQ(field1.size(), 2);
285   EXPECT_EQ(field1.Get(0), 5);
286   EXPECT_EQ(field1.Get(1), 42);
287   EXPECT_EQ(field2.size(), 16);
288   for (int i = 0; i < 16; i++) {
289     EXPECT_EQ(field2.Get(i), i * i);
290   }
291 }
292 
293 TEST(RepeatedField, SwapLargeLarge) {
294   RepeatedField<int> field1;
295   RepeatedField<int> field2;
296 
297   field1.Add(5);
298   field1.Add(42);
299   for (int i = 0; i < 16; i++) {
300     field1.Add(i);
301     field2.Add(i * i);
302   }
303   field2.Swap(&field1);
304 
305   EXPECT_EQ(field1.size(), 16);
306   for (int i = 0; i < 16; i++) {
307     EXPECT_EQ(field1.Get(i), i * i);
308   }
309   EXPECT_EQ(field2.size(), 18);
310   EXPECT_EQ(field2.Get(0), 5);
311   EXPECT_EQ(field2.Get(1), 42);
312   for (int i = 2; i < 18; i++) {
313     EXPECT_EQ(field2.Get(i), i - 2);
314   }
315 }
316 
317 template <int kSize>
318 void TestMemswap() {
319   SCOPED_TRACE(kSize);
320 
321   const auto a_char = [](int i) -> char { return (i % ('z' - 'a')) + 'a'; };
322   const auto b_char = [](int i) -> char { return (i % ('Z' - 'A')) + 'A'; };
323   std::string a, b;
324   for (int i = 0; i < kSize; ++i) {
325     a += a_char(i);
326     b += b_char(i);
327   }
328   // We will not swap these.
329   a += '+';
330   b += '-';
331 
332   std::string expected_a = b, expected_b = a;
333   expected_a.back() = '+';
334   expected_b.back() = '-';
335 
336   internal::memswap<kSize>(&a[0], &b[0]);
337 
338   // ODR use the functions in a way that forces the linker to keep them. That
339   // way we can see their generated code.
340   volatile auto odr_use_for_asm_dump = &internal::memswap<kSize>;
341   (void)odr_use_for_asm_dump;
342 
343   EXPECT_EQ(expected_a, a);
344   EXPECT_EQ(expected_b, b);
345 }
346 
347 TEST(Memswap, VerifyWithSmallAndLargeSizes) {
348   // Arbitrary sizes
349   TestMemswap<0>();
350   TestMemswap<1>();
351   TestMemswap<10>();
352   TestMemswap<100>();
353   TestMemswap<1000>();
354   TestMemswap<10000>();
355   TestMemswap<100000>();
356   TestMemswap<1000000>();
357 
358   // Pointer aligned sizes
359   TestMemswap<sizeof(void*) * 1>();
360   TestMemswap<sizeof(void*) * 7>();
361   TestMemswap<sizeof(void*) * 17>();
362   TestMemswap<sizeof(void*) * 27>();
363 
364   // Test also just the block size and no leftover.
365   TestMemswap<64 * 1>();
366   TestMemswap<64 * 2>();
367   TestMemswap<64 * 3>();
368   TestMemswap<64 * 4>();
369 }
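
// Concrete sketch (editor addition) of internal::memswap<N>, which the
// parameterized test above exercises at many sizes: swapping two buffers
// exchanges exactly N bytes and leaves everything past them untouched.
TEST(Memswap, ConcreteFourByteSketch) {
  char a[] = "abcd";
  char b[] = "WXYZ";
  internal::memswap<4>(a, b);  // swaps the 4 letters, not the terminators
  EXPECT_STREQ("WXYZ", a);
  EXPECT_STREQ("abcd", b);
}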
370 
371 // Determines how much space was reserved by the given field by adding elements
372 // to it until it re-allocates its space.
373 static int ReservedSpace(RepeatedField<int>* field) {
374   const int* ptr = field->data();
375   do {
376     field->Add(0);
377   } while (field->data() == ptr);
378 
379   return field->size() - 1;
380 }
381 
382 TEST(RepeatedField, ReserveMoreThanDouble) {
383   // Reserve more than double the previous space in the field and expect the
384   // field to reserve exactly the amount specified.
385   RepeatedField<int> field;
386   field.Reserve(20);
387 
388   EXPECT_LE(20, ReservedSpace(&field));
389 }
390 
391 TEST(RepeatedField, ReserveLessThanDouble) {
392   // Reserve less than double the previous space in the field and expect the
393   // field to grow by double instead.
394   RepeatedField<int> field;
395   field.Reserve(20);
396   int capacity = field.Capacity();
397   field.Reserve(capacity * 1.5);
398 
399   EXPECT_LE(2 * capacity, ReservedSpace(&field));
400 }
401 
402 TEST(RepeatedField, ReserveLessThanExisting) {
403   // Reserve less than the previous space in the field and expect the
404   // field to not re-allocate at all.
405   RepeatedField<int> field;
406   field.Reserve(20);
407   const int* previous_ptr = field.data();
408   field.Reserve(10);
409 
410   EXPECT_EQ(previous_ptr, field.data());
411   EXPECT_LE(20, ReservedSpace(&field));
412 }
413 
414 TEST(RepeatedField, Resize) {
415   RepeatedField<int> field;
416   field.Resize(2, 1);
417   EXPECT_EQ(2, field.size());
418   field.Resize(5, 2);
419   EXPECT_EQ(5, field.size());
420   field.Resize(4, 3);
421   ASSERT_EQ(4, field.size());
422   EXPECT_EQ(1, field.Get(0));
423   EXPECT_EQ(1, field.Get(1));
424   EXPECT_EQ(2, field.Get(2));
425   EXPECT_EQ(2, field.Get(3));
426   field.Resize(0, 4);
427   EXPECT_TRUE(field.empty());
428 }
429 
430 TEST(RepeatedField, ReserveLowerClamp) {
431   int clamped_value = internal::CalculateReserveSize<bool, sizeof(void*)>(0, 1);
432   EXPECT_GE(clamped_value, sizeof(void*) / sizeof(bool));
433   EXPECT_EQ((internal::RepeatedFieldLowerClampLimit<bool, sizeof(void*)>()),
434             clamped_value);
435   // EXPECT_EQ(clamped_value, (internal::CalculateReserveSize<bool,
436   // sizeof(void*)>( clamped_value, 2)));
437 
438   clamped_value = internal::CalculateReserveSize<int, sizeof(void*)>(0, 1);
439   EXPECT_GE(clamped_value, sizeof(void*) / sizeof(int));
440   EXPECT_EQ((internal::RepeatedFieldLowerClampLimit<int, sizeof(void*)>()),
441             clamped_value);
442   // EXPECT_EQ(clamped_value, (internal::CalculateReserveSize<int,
443   // sizeof(void*)>( clamped_value, 2)));
444 }
445 
446 TEST(RepeatedField, ReserveGrowth) {
447   // Make sure the field capacity doubles in size on repeated reservation.
448   for (int size = internal::RepeatedFieldLowerClampLimit<int, sizeof(void*)>(),
449            i = 0;
450        i < 4; ++i) {
451     int next =
452         sizeof(Arena*) >= sizeof(int)
453             ?
454             // for small enough elements, we double number of total bytes
455             ((2 * (size * sizeof(int) + sizeof(Arena*))) - sizeof(Arena*)) /
456                 sizeof(int)
457             :
458             // we just double the number of elements if too large size.
459             size * 2;
460     EXPECT_EQ(next, (internal::CalculateReserveSize<int, sizeof(void*)>(
461                         size, size + 1)));
462     size = next;
463   }
464 }
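
// Worked example (editor addition) of the growth rule exercised above, under
// the same assumptions as the loop (64-bit pointers, 4-byte int): a heap
// block holding 4 ints occupies 4 * 4 + 8 = 24 bytes including the Arena*
// header; doubling the bytes gives 48, which holds (48 - 8) / 4 = 10 ints.
TEST(RepeatedFieldExample, ReserveGrowthWorkedSketch) {
  if (sizeof(void*) == 8 && sizeof(int) == 4) {
    EXPECT_EQ(10, (internal::CalculateReserveSize<int, sizeof(void*)>(4, 5)));
  }
}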
465 
466 TEST(RepeatedField, ReserveLarge) {
467   const int old_size = 10;
468   // This is a size we won't get by doubling:
469   const int new_size = old_size * 3 + 1;
470 
471   // Reserving more than 2x current capacity should grow directly to that size.
472   EXPECT_EQ(new_size, (internal::CalculateReserveSize<int, sizeof(void*)>(
473                           old_size, new_size)));
474 }
475 
476 TEST(RepeatedField, ReserveHuge) {
477 #if defined(PROTOBUF_ASAN) || defined(PROTOBUF_MSAN)
478   GTEST_SKIP() << "Disabled because sanitizer is active";
479 #endif
480   // Largest value that does not clamp to the large limit:
481   constexpr int non_clamping_limit =
482       (std::numeric_limits<int>::max() - sizeof(Arena*)) / 2;
483   ASSERT_LT(2 * non_clamping_limit, std::numeric_limits<int>::max());
484   EXPECT_LT((internal::CalculateReserveSize<int, sizeof(void*)>(
485                 non_clamping_limit, non_clamping_limit + 1)),
486             std::numeric_limits<int>::max());
487 
488   // Smallest size that *will* clamp to the upper limit:
489   constexpr int min_clamping_size = std::numeric_limits<int>::max() / 2 + 1;
490   EXPECT_EQ((internal::CalculateReserveSize<int, sizeof(void*)>(
491                 min_clamping_size, min_clamping_size + 1)),
492             std::numeric_limits<int>::max());
493 
494 #ifdef PROTOBUF_TEST_ALLOW_LARGE_ALLOC
495   // The rest of this test may allocate several GB of memory, so it is only
496   // built if explicitly requested.
497   RepeatedField<int> huge_field;
498 
499   // Reserve a size for huge_field that will clamp.
500   huge_field.Reserve(min_clamping_size);
501   EXPECT_GE(huge_field.Capacity(), min_clamping_size);
502   ASSERT_LT(huge_field.Capacity(), std::numeric_limits<int>::max() - 1);
503 
504   // The array containing all the fields is, in theory, up to MAXINT-1 in size.
505   // However, some compilers can't handle a struct whose size is larger
506   // than 2GB, and the protocol buffer format doesn't handle more than 2GB of
507   // data at once, either.  So we limit it, but the code below accesses beyond
508   // that limit.
509 
510   // Allocation may return more memory than we requested. However, the updated
511   // size must still be clamped to a valid range.
512   huge_field.Reserve(huge_field.Capacity() + 1);
513   EXPECT_EQ(huge_field.Capacity(), std::numeric_limits<int>::max());
514 #endif  // PROTOBUF_TEST_ALLOW_LARGE_ALLOC
515 }
516 
517 TEST(RepeatedField, MergeFrom) {
518   RepeatedField<int> source, destination;
519   source.Add(4);
520   source.Add(5);
521   destination.Add(1);
522   destination.Add(2);
523   destination.Add(3);
524 
525   destination.MergeFrom(source);
526 
527   ASSERT_EQ(5, destination.size());
528   EXPECT_EQ(1, destination.Get(0));
529   EXPECT_EQ(2, destination.Get(1));
530   EXPECT_EQ(3, destination.Get(2));
531   EXPECT_EQ(4, destination.Get(3));
532   EXPECT_EQ(5, destination.Get(4));
533 }
534 
535 
536 TEST(RepeatedField, CopyFrom) {
537   RepeatedField<int> source, destination;
538   source.Add(4);
539   source.Add(5);
540   destination.Add(1);
541   destination.Add(2);
542   destination.Add(3);
543 
544   destination.CopyFrom(source);
545 
546   ASSERT_EQ(2, destination.size());
547   EXPECT_EQ(4, destination.Get(0));
548   EXPECT_EQ(5, destination.Get(1));
549 }
550 
551 TEST(RepeatedField, CopyFromSelf) {
552   RepeatedField<int> me;
553   me.Add(3);
554   me.CopyFrom(me);
555   ASSERT_EQ(1, me.size());
556   EXPECT_EQ(3, me.Get(0));
557 }
558 
559 TEST(RepeatedField, Erase) {
560   RepeatedField<int> me;
561   RepeatedField<int>::iterator it = me.erase(me.begin(), me.end());
562   EXPECT_TRUE(me.begin() == it);
563   EXPECT_EQ(0, me.size());
564 
565   me.Add(1);
566   me.Add(2);
567   me.Add(3);
568   it = me.erase(me.begin(), me.end());
569   EXPECT_TRUE(me.begin() == it);
570   EXPECT_EQ(0, me.size());
571 
572   me.Add(4);
573   me.Add(5);
574   me.Add(6);
575   it = me.erase(me.begin() + 2, me.end());
576   EXPECT_TRUE(me.begin() + 2 == it);
577   EXPECT_EQ(2, me.size());
578   EXPECT_EQ(4, me.Get(0));
579   EXPECT_EQ(5, me.Get(1));
580 
581   me.Add(6);
582   me.Add(7);
583   me.Add(8);
584   it = me.erase(me.begin() + 1, me.begin() + 3);
585   EXPECT_TRUE(me.begin() + 1 == it);
586   EXPECT_EQ(3, me.size());
587   EXPECT_EQ(4, me.Get(0));
588   EXPECT_EQ(7, me.Get(1));
589   EXPECT_EQ(8, me.Get(2));
590 }
591 
592 // Add contents of empty container to an empty field.
593 TEST(RepeatedField, AddRange1) {
594   RepeatedField<int> me;
595   std::vector<int> values;
596 
597   me.Add(values.begin(), values.end());
598   ASSERT_EQ(me.size(), 0);
599 }
600 
601 // Add contents of container with one thing to an empty field.
602 TEST(RepeatedField, AddRange2) {
603   RepeatedField<int> me;
604   std::vector<int> values;
605   values.push_back(-1);
606 
607   me.Add(values.begin(), values.end());
608   ASSERT_EQ(me.size(), 1);
609   ASSERT_EQ(me.Get(0), values[0]);
610 }
611 
612 // Add contents of container with more than one thing to an empty field.
613 TEST(RepeatedField, AddRange3) {
614   RepeatedField<int> me;
615   std::vector<int> values;
616   values.push_back(0);
617   values.push_back(1);
618 
619   me.Add(values.begin(), values.end());
620   ASSERT_EQ(me.size(), 2);
621   ASSERT_EQ(me.Get(0), values[0]);
622   ASSERT_EQ(me.Get(1), values[1]);
623 }
624 
625 // Add contents of container with more than one thing to a non-empty field.
626 TEST(RepeatedField, AddRange4) {
627   RepeatedField<int> me;
628   me.Add(0);
629   me.Add(1);
630 
631   std::vector<int> values;
632   values.push_back(2);
633   values.push_back(3);
634 
635   me.Add(values.begin(), values.end());
636   ASSERT_EQ(me.size(), 4);
637   ASSERT_EQ(me.Get(0), 0);
638   ASSERT_EQ(me.Get(1), 1);
639   ASSERT_EQ(me.Get(2), values[0]);
640   ASSERT_EQ(me.Get(3), values[1]);
641 }
642 
643 // Add contents of a stringstream in order to test code paths where there is
644 // an input iterator.
645 TEST(RepeatedField, AddRange5) {
646   RepeatedField<int> me;
647   me.Add(0);
648 
649   std::stringstream ss;
650   ss << 1 << ' ' << 2;
651 
652   me.Add(std::istream_iterator<int>(ss), std::istream_iterator<int>());
653   ASSERT_EQ(me.size(), 3);
654   ASSERT_EQ(me.Get(0), 0);
655   ASSERT_EQ(me.Get(1), 1);
656   ASSERT_EQ(me.Get(2), 2);
657 }
658 
659 // Add contents of container with a quirky iterator like std::vector<bool>
660 TEST(RepeatedField, AddRange6) {
661   RepeatedField<bool> me;
662   me.Add(true);
663   me.Add(false);
664 
665   std::vector<bool> values;
666   values.push_back(true);
667   values.push_back(true);
668   values.push_back(false);
669 
670   me.Add(values.begin(), values.end());
671   ASSERT_EQ(me.size(), 5);
672   ASSERT_EQ(me.Get(0), true);
673   ASSERT_EQ(me.Get(1), false);
674   ASSERT_EQ(me.Get(2), true);
675   ASSERT_EQ(me.Get(3), true);
676   ASSERT_EQ(me.Get(4), false);
677 }
678 
679 // Add contents of absl::Span which evaluates to const T on access.
680 TEST(RepeatedField, AddRange7) {
681   int ints[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
682   absl::Span<const int> span(ints);
683   auto p = span.begin();
684   static_assert(std::is_convertible<decltype(p), const int*>::value, "");
685   RepeatedField<int> me;
686   me.Add(span.begin(), span.end());
687 
688   ASSERT_EQ(me.size(), 10);
689   for (int i = 0; i < 10; ++i) {
690     ASSERT_EQ(me.Get(i), i);
691   }
692 }
693 
694 TEST(RepeatedField, AddAndAssignRanges) {
695   RepeatedField<int> field;
696 
697   int vals[] = {2, 27, 2875, 609250};
698   field.Assign(std::begin(vals), std::end(vals));
699 
700   ASSERT_EQ(field.size(), 4);
701   EXPECT_EQ(field.Get(0), 2);
702   EXPECT_EQ(field.Get(1), 27);
703   EXPECT_EQ(field.Get(2), 2875);
704   EXPECT_EQ(field.Get(3), 609250);
705 
706   field.Add(std::begin(vals), std::end(vals));
707   ASSERT_EQ(field.size(), 8);
708   EXPECT_EQ(field.Get(0), 2);
709   EXPECT_EQ(field.Get(1), 27);
710   EXPECT_EQ(field.Get(2), 2875);
711   EXPECT_EQ(field.Get(3), 609250);
712   EXPECT_EQ(field.Get(4), 2);
713   EXPECT_EQ(field.Get(5), 27);
714   EXPECT_EQ(field.Get(6), 2875);
715   EXPECT_EQ(field.Get(7), 609250);
716 }
717 
718 TEST(RepeatedField, CopyConstructIntegers) {
719   auto token = internal::InternalVisibilityForTesting{};
720   using RepeatedType = RepeatedField<int>;
721   RepeatedType original;
722   original.Add(1);
723   original.Add(2);
724 
725   RepeatedType fields1(original);
726   ASSERT_EQ(2, fields1.size());
727   EXPECT_EQ(1, fields1.Get(0));
728   EXPECT_EQ(2, fields1.Get(1));
729 
730   RepeatedType fields2(token, nullptr, original);
731   ASSERT_EQ(2, fields2.size());
732   EXPECT_EQ(1, fields2.Get(0));
733   EXPECT_EQ(2, fields2.Get(1));
734 }
735 
736 TEST(RepeatedField, CopyConstructCords) {
737   auto token = internal::InternalVisibilityForTesting{};
738   using RepeatedType = RepeatedField<absl::Cord>;
739   RepeatedType original;
740   original.Add(absl::Cord("hello"));
741   original.Add(absl::Cord("world and text to avoid SSO"));
742 
743   RepeatedType fields1(original);
744   ASSERT_EQ(2, fields1.size());
745   EXPECT_EQ("hello", fields1.Get(0));
746   EXPECT_EQ("world and text to avoid SSO", fields1.Get(1));
747 
748   RepeatedType fields2(token, nullptr, original);
749   ASSERT_EQ(2, fields2.size());
750   EXPECT_EQ("hello", fields2.Get(0));
751   EXPECT_EQ("world and text to avoid SSO", fields2.Get(1));
752 }
753 
754 TEST(RepeatedField, CopyConstructIntegersWithArena) {
755   auto token = internal::InternalVisibilityForTesting{};
756   using RepeatedType = RepeatedField<int>;
757   RepeatedType original;
758   original.Add(1);
759   original.Add(2);
760 
761   Arena arena;
762   alignas(RepeatedType) char mem[sizeof(RepeatedType)];
763   RepeatedType& fields1 = *new (mem) RepeatedType(token, &arena, original);
764   ASSERT_EQ(2, fields1.size());
765   EXPECT_EQ(1, fields1.Get(0));
766   EXPECT_EQ(2, fields1.Get(1));
767 }
768 
769 TEST(RepeatedField, CopyConstructCordsWithArena) {
770   auto token = internal::InternalVisibilityForTesting{};
771   using RepeatedType = RepeatedField<absl::Cord>;
772   RepeatedType original;
773   original.Add(absl::Cord("hello"));
774   original.Add(absl::Cord("world and text to avoid SSO"));
775 
776   Arena arena;
777   alignas(RepeatedType) char mem[sizeof(RepeatedType)];
778   RepeatedType& fields1 = *new (mem) RepeatedType(token, &arena, original);
779   ASSERT_EQ(2, fields1.size());
780   EXPECT_EQ("hello", fields1.Get(0));
781   EXPECT_EQ("world and text to avoid SSO", fields1.Get(1));
782 
783   // Contract requires dtor to be invoked for absl::Cord
784   fields1.~RepeatedType();
785 }
786 
787 TEST(RepeatedField, IteratorConstruct) {
788   std::vector<int> values;
789   RepeatedField<int> empty(values.begin(), values.end());
790   ASSERT_EQ(values.size(), empty.size());
791 
792   values.push_back(1);
793   values.push_back(2);
794 
795   RepeatedField<int> field(values.begin(), values.end());
796   ASSERT_EQ(values.size(), field.size());
797   EXPECT_EQ(values[0], field.Get(0));
798   EXPECT_EQ(values[1], field.Get(1));
799 
800   RepeatedField<int> other(field.begin(), field.end());
801   ASSERT_EQ(values.size(), other.size());
802   EXPECT_EQ(values[0], other.Get(0));
803   EXPECT_EQ(values[1], other.Get(1));
804 }
805 
806 TEST(RepeatedField, CopyAssign) {
807   RepeatedField<int> source, destination;
808   source.Add(4);
809   source.Add(5);
810   destination.Add(1);
811   destination.Add(2);
812   destination.Add(3);
813 
814   destination = source;
815 
816   ASSERT_EQ(2, destination.size());
817   EXPECT_EQ(4, destination.Get(0));
818   EXPECT_EQ(5, destination.Get(1));
819 }
820 
821 TEST(RepeatedField, SelfAssign) {
822   // Verify that assignment to self does not destroy data.
823   RepeatedField<int> source, *p;
824   p = &source;
825   source.Add(7);
826   source.Add(8);
827 
828   *p = source;
829 
830   ASSERT_EQ(2, source.size());
831   EXPECT_EQ(7, source.Get(0));
832   EXPECT_EQ(8, source.Get(1));
833 }
834 
835 TEST(RepeatedField, MoveConstruct) {
836   {
837     RepeatedField<int> source;
838     source.Add(1);
839     source.Add(2);
840     RepeatedField<int> destination = std::move(source);
841     EXPECT_THAT(destination, ElementsAre(1, 2));
842     // This property isn't guaranteed but it's useful to have a test that would
843     // catch changes in this area.
844     EXPECT_TRUE(source.empty());
845   }
846   {
847     Arena arena;
848     RepeatedField<int>* source = Arena::Create<RepeatedField<int>>(&arena);
849     source->Add(1);
850     source->Add(2);
851     RepeatedField<int> destination = std::move(*source);
852     EXPECT_EQ(nullptr, destination.GetArena());
853     EXPECT_THAT(destination, ElementsAre(1, 2));
854     // This property isn't guaranteed but it's useful to have a test that would
855     // catch changes in this area.
856     EXPECT_THAT(*source, ElementsAre(1, 2));
857   }
858 }
859 
860 TEST(RepeatedField, MoveAssign) {
861   {
862     RepeatedField<int> source;
863     source.Add(1);
864     source.Add(2);
865     RepeatedField<int> destination;
866     destination.Add(3);
867     destination = std::move(source);
868     EXPECT_THAT(destination, ElementsAre(1, 2));
869     EXPECT_THAT(source, ElementsAre(3));
870   }
871   {
872     Arena arena;
873     RepeatedField<int>* source = Arena::Create<RepeatedField<int>>(&arena);
874     source->Add(1);
875     source->Add(2);
876     RepeatedField<int>* destination = Arena::Create<RepeatedField<int>>(&arena);
877     destination->Add(3);
878     *destination = std::move(*source);
879     EXPECT_THAT(*destination, ElementsAre(1, 2));
880     EXPECT_THAT(*source, ElementsAre(3));
881   }
882   {
883     Arena source_arena;
884     RepeatedField<int>* source =
885         Arena::Create<RepeatedField<int>>(&source_arena);
886     source->Add(1);
887     source->Add(2);
888     Arena destination_arena;
889     RepeatedField<int>* destination =
890         Arena::Create<RepeatedField<int>>(&destination_arena);
891     destination->Add(3);
892     *destination = std::move(*source);
893     EXPECT_THAT(*destination, ElementsAre(1, 2));
894     // This property isn't guaranteed but it's useful to have a test that would
895     // catch changes in this area.
896     EXPECT_THAT(*source, ElementsAre(1, 2));
897   }
898   {
899     Arena arena;
900     RepeatedField<int>* source = Arena::Create<RepeatedField<int>>(&arena);
901     source->Add(1);
902     source->Add(2);
903     RepeatedField<int> destination;
904     destination.Add(3);
905     destination = std::move(*source);
906     EXPECT_THAT(destination, ElementsAre(1, 2));
907     // This property isn't guaranteed but it's useful to have a test that would
908     // catch changes in this area.
909     EXPECT_THAT(*source, ElementsAre(1, 2));
910   }
911   {
912     RepeatedField<int> source;
913     source.Add(1);
914     source.Add(2);
915     Arena arena;
916     RepeatedField<int>* destination = Arena::Create<RepeatedField<int>>(&arena);
917     destination->Add(3);
918     *destination = std::move(source);
919     EXPECT_THAT(*destination, ElementsAre(1, 2));
920     // This property isn't guaranteed but it's useful to have a test that would
921     // catch changes in this area.
922     EXPECT_THAT(source, ElementsAre(1, 2));
923   }
924   {
925     RepeatedField<int> field;
926     // An alias to defeat -Wself-move.
927     RepeatedField<int>& alias = field;
928     field.Add(1);
929     field.Add(2);
930     field = std::move(alias);
931     EXPECT_THAT(field, ElementsAre(1, 2));
932   }
933   {
934     Arena arena;
935     RepeatedField<int>* field = Arena::Create<RepeatedField<int>>(&arena);
936     field->Add(1);
937     field->Add(2);
938     *field = std::move(*field);
939     EXPECT_THAT(*field, ElementsAre(1, 2));
940   }
941 }
942 
943 TEST(Movable, Works) {
944   class NonMoveConstructible {
945    public:
946     NonMoveConstructible(NonMoveConstructible&&) = delete;
947     NonMoveConstructible& operator=(NonMoveConstructible&&) { return *this; }
948   };
949   class NonMoveAssignable {
950    public:
951     NonMoveAssignable(NonMoveAssignable&&) {}
952     NonMoveAssignable& operator=(NonMoveAssignable&&) = delete;
953   };
954   class NonMovable {
955    public:
956     NonMovable(NonMovable&&) = delete;
957     NonMovable& operator=(NonMovable&&) = delete;
958   };
959 
960   EXPECT_TRUE(internal::IsMovable<std::string>::value);
961 
962   EXPECT_FALSE(std::is_move_constructible<NonMoveConstructible>::value);
963   EXPECT_TRUE(std::is_move_assignable<NonMoveConstructible>::value);
964   EXPECT_FALSE(internal::IsMovable<NonMoveConstructible>::value);
965 
966   EXPECT_TRUE(std::is_move_constructible<NonMoveAssignable>::value);
967   EXPECT_FALSE(std::is_move_assignable<NonMoveAssignable>::value);
968   EXPECT_FALSE(internal::IsMovable<NonMoveAssignable>::value);
969 
970   EXPECT_FALSE(internal::IsMovable<NonMovable>::value);
971 }
972 
973 TEST(RepeatedField, MutableDataIsMutable) {
974   RepeatedField<int> field;
975   field.Add(1);
976   EXPECT_EQ(1, field.Get(0));
977   // The fact that this line compiles would be enough, but we'll check the
978   // value anyway.
979   *field.mutable_data() = 2;
980   EXPECT_EQ(2, field.Get(0));
981 }
982 
983 TEST(RepeatedField, SubscriptOperators) {
984   RepeatedField<int> field;
985   field.Add(1);
986   EXPECT_EQ(1, field.Get(0));
987   EXPECT_EQ(1, field[0]);
988   EXPECT_EQ(field.Mutable(0), &field[0]);
989   const RepeatedField<int>& const_field = field;
990   EXPECT_EQ(field.data(), &const_field[0]);
991 }
992 
993 TEST(RepeatedField, Truncate) {
994   RepeatedField<int> field;
995 
996   field.Add(12);
997   field.Add(34);
998   field.Add(56);
999   field.Add(78);
1000   EXPECT_EQ(4, field.size());
1001 
1002   field.Truncate(3);
1003   EXPECT_EQ(3, field.size());
1004 
1005   field.Add(90);
1006   EXPECT_EQ(4, field.size());
1007   EXPECT_EQ(90, field.Get(3));
1008 
1009   // Truncations that don't change the size are allowed, but growing is not
1010   // allowed.
1011   field.Truncate(field.size());
1012 #if GTEST_HAS_DEATH_TEST
1013   EXPECT_DEBUG_DEATH(field.Truncate(field.size() + 1), "new_size");
1014 #endif
1015 }
1016 
1017 TEST(RepeatedCordField, AddRemoveLast) {
1018   RepeatedField<absl::Cord> field;
1019   field.Add(absl::Cord("foo"));
1020   field.RemoveLast();
1021 }
1022 
1023 TEST(RepeatedCordField, AddClear) {
1024   RepeatedField<absl::Cord> field;
1025   field.Add(absl::Cord("foo"));
1026   field.Clear();
1027 }
1028 
1029 TEST(RepeatedCordField, Resize) {
1030   RepeatedField<absl::Cord> field;
1031   field.Resize(10, absl::Cord("foo"));
1032 }
1033 
1034 TEST(RepeatedField, Cords) {
1035   RepeatedField<absl::Cord> field;
1036 
1037   field.Add(absl::Cord("foo"));
1038   field.Add(absl::Cord("bar"));
1039   field.Add(absl::Cord("baz"));
1040   field.Add(absl::Cord("moo"));
1041   field.Add(absl::Cord("corge"));
1042 
1043   EXPECT_EQ("foo", std::string(field.Get(0)));
1044   EXPECT_EQ("corge", std::string(field.Get(4)));
1045 
1046   // Test swap.  Note:  One of the swapped objects is using internal storage,
1047   //   the other is not.
1048   RepeatedField<absl::Cord> field2;
1049   field2.Add(absl::Cord("grault"));
1050   field.Swap(&field2);
1051   EXPECT_EQ(1, field.size());
1052   EXPECT_EQ("grault", std::string(field.Get(0)));
1053   EXPECT_EQ(5, field2.size());
1054   EXPECT_EQ("foo", std::string(field2.Get(0)));
1055   EXPECT_EQ("corge", std::string(field2.Get(4)));
1056 
1057   // Test SwapElements().
1058   field2.SwapElements(1, 3);
1059   EXPECT_EQ("moo", std::string(field2.Get(1)));
1060   EXPECT_EQ("bar", std::string(field2.Get(3)));
1061 
1062   // Make sure cords are cleared correctly.
1063   field2.RemoveLast();
1064   EXPECT_TRUE(field2.Add()->empty());
1065   field2.Clear();
1066   EXPECT_TRUE(field2.Add()->empty());
1067 }
1068 
1069 TEST(RepeatedField, TruncateCords) {
1070   RepeatedField<absl::Cord> field;
1071 
1072   field.Add(absl::Cord("foo"));
1073   field.Add(absl::Cord("bar"));
1074   field.Add(absl::Cord("baz"));
1075   field.Add(absl::Cord("moo"));
1076   EXPECT_EQ(4, field.size());
1077 
1078   field.Truncate(3);
1079   EXPECT_EQ(3, field.size());
1080 
1081   field.Add(absl::Cord("corge"));
1082   EXPECT_EQ(4, field.size());
1083   EXPECT_EQ("corge", std::string(field.Get(3)));
1084 
1085   // Truncating to the current size should be fine (no-op), but truncating
1086   // to a larger size should crash.
1087   field.Truncate(field.size());
1088 #if defined(GTEST_HAS_DEATH_TEST) && !defined(NDEBUG)
1089   EXPECT_DEATH(field.Truncate(field.size() + 1), "new_size");
1090 #endif
1091 }
1092 
1093 TEST(RepeatedField, ResizeCords) {
1094   RepeatedField<absl::Cord> field;
1095   field.Resize(2, absl::Cord("foo"));
1096   EXPECT_EQ(2, field.size());
1097   field.Resize(5, absl::Cord("bar"));
1098   EXPECT_EQ(5, field.size());
1099   field.Resize(4, absl::Cord("baz"));
1100   ASSERT_EQ(4, field.size());
1101   EXPECT_EQ("foo", std::string(field.Get(0)));
1102   EXPECT_EQ("foo", std::string(field.Get(1)));
1103   EXPECT_EQ("bar", std::string(field.Get(2)));
1104   EXPECT_EQ("bar", std::string(field.Get(3)));
1105   field.Resize(0, absl::Cord("moo"));
1106   EXPECT_TRUE(field.empty());
1107 }
1108 
1109 TEST(RepeatedField, ExtractSubrange) {
1110   // Exhaustively test every subrange in arrays of all sizes from 0 through 9.
1111   for (int sz = 0; sz < 10; ++sz) {
1112     for (int num = 0; num <= sz; ++num) {
1113       for (int start = 0; start < sz - num; ++start) {
1114         // Create RepeatedField with sz elements having values 0 through sz-1.
1115         RepeatedField<int32_t> field;
1116         for (int i = 0; i < sz; ++i) field.Add(i);
1117         EXPECT_EQ(field.size(), sz);
1118 
1119         // Create a catcher array and call ExtractSubrange.
1120         int32_t catcher[10];
1121         for (int i = 0; i < 10; ++i) catcher[i] = -1;
1122         field.ExtractSubrange(start, num, catcher);
1123 
1124         // Does the resulting array have the right size?
1125         EXPECT_EQ(field.size(), sz - num);
1126 
1127         // Were the removed elements extracted into the catcher array?
1128         for (int i = 0; i < num; ++i) EXPECT_EQ(catcher[i], start + i);
1129         EXPECT_EQ(catcher[num], -1);
1130 
1131         // Does the resulting array contain the right values?
1132         for (int i = 0; i < start; ++i) EXPECT_EQ(field.Get(i), i);
1133         for (int i = start; i < field.size(); ++i)
1134           EXPECT_EQ(field.Get(i), i + num);
1135       }
1136     }
1137   }
1138 }
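
// Concrete sketch (editor addition) of the ExtractSubrange() semantics covered
// exhaustively above: removing 2 elements starting at index 1 from {0, 1, 2, 3}
// copies {1, 2} into the destination buffer and leaves {0, 3} behind.
TEST(RepeatedFieldExample, ExtractSubrangeConcreteSketch) {
  RepeatedField<int32_t> field;
  for (int i = 0; i < 4; ++i) field.Add(i);
  int32_t extracted[2] = {-1, -1};
  field.ExtractSubrange(1, 2, extracted);
  EXPECT_THAT(field, ElementsAre(0, 3));
  EXPECT_EQ(extracted[0], 1);
  EXPECT_EQ(extracted[1], 2);
}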
1139 
1140 TEST(RepeatedField, TestSAddFromSelf) {
1141   RepeatedField<int> field;
1142   field.Add(0);
1143   for (int i = 0; i < 1000; i++) {
1144     field.Add(field[0]);
1145   }
1146 }
1147 
1148 // We have, or at least had, bad callers that never triggered our DCHECKs.
1149 // Here we check that we DO fail on bad Truncate calls in debug builds, and do
1150 // nothing in opt builds.
1151 TEST(RepeatedField, HardenAgainstBadTruncate) {
1152   RepeatedField<int> field;
1153   for (int size = 0; size < 10; ++size) {
1154     field.Truncate(size);
1155 #if GTEST_HAS_DEATH_TEST
1156     EXPECT_DEBUG_DEATH(field.Truncate(size + 1), "new_size <= old_size");
1157     EXPECT_DEBUG_DEATH(field.Truncate(size + 2), "new_size <= old_size");
1158 #elif defined(NDEBUG)
1159     field.Truncate(size + 1);
1160     field.Truncate(size + 1);
1161 #endif
1162     EXPECT_EQ(field.size(), size);
1163     field.Add(1);
1164   }
1165 }
1166 
1167 #if defined(GTEST_HAS_DEATH_TEST) && \
1168     (defined(PROTOBUF_ASAN) || defined(PROTOBUF_MSAN))
1169 
1170 // This function verifies that the code dies under ASAN or MSAN trying to both
1171 // read and write the reserved element directly beyond the last element.
1172 void VerifyDeathOnWriteAndReadAccessBeyondEnd(RepeatedField<int64_t>& field) {
1173   auto* end = field.Mutable(field.size() - 1) + 1;
1174 #if defined(PROTOBUF_ASAN)
1175   EXPECT_DEATH(*end = 1, "container-overflow");
1176   EXPECT_DEATH(EXPECT_NE(*end, 1), "container-overflow");
1177 #elif defined(PROTOBUF_MSAN)
1178   EXPECT_DEATH(EXPECT_NE(*end, 1), "use-of-uninitialized-value");
1179 #endif
1180 
1181   // Confirm we died a death of *SAN
1182   EXPECT_EQ(field.AddAlreadyReserved(), end);
1183   *end = 1;
1184   EXPECT_EQ(*end, 1);
1185 }
1186 
1187 TEST(RepeatedField, PoisonsMemoryOnAdd) {
1188   RepeatedField<int64_t> field;
1189   do {
1190     field.Add(0);
1191   } while (field.size() == field.Capacity());
1192   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1193 }
1194 
1195 TEST(RepeatedField, PoisonsMemoryOnAddAlreadyReserved) {
1196   RepeatedField<int64_t> field;
1197   field.Reserve(2);
1198   field.AddAlreadyReserved();
1199   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1200 }
1201 
1202 TEST(RepeatedField, PoisonsMemoryOnAddNAlreadyReserved) {
1203   RepeatedField<int64_t> field;
1204   field.Reserve(10);
1205   field.AddNAlreadyReserved(8);
1206   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1207 }
1208 
1209 TEST(RepeatedField, PoisonsMemoryOnResize) {
1210   RepeatedField<int64_t> field;
1211   field.Add(0);
1212   do {
1213     field.Resize(field.size() + 1, 1);
1214   } while (field.size() == field.Capacity());
1215   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1216 
1217   // Shrink size
1218   field.Resize(field.size() - 1, 1);
1219   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1220 }
1221 
1222 TEST(RepeatedField, PoisonsMemoryOnTruncate) {
1223   RepeatedField<int64_t> field;
1224   field.Add(0);
1225   field.Add(1);
1226   field.Truncate(1);
1227   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1228 }
1229 
1230 TEST(RepeatedField, PoisonsMemoryOnReserve) {
1231   RepeatedField<int64_t> field;
1232   field.Add(1);
1233   field.Reserve(field.Capacity() + 1);
1234   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1235 }
1236 
1237 TEST(RepeatedField, PoisonsMemoryOnAssign) {
1238   RepeatedField<int64_t> src;
1239   RepeatedField<int64_t> field;
1240   src.Add(1);
1241   src.Add(2);
1242   field.Reserve(3);
1243   field = src;
1244   VerifyDeathOnWriteAndReadAccessBeyondEnd(field);
1245 }
1246 
1247 #endif
1248 
1249 TEST(RepeatedField, Cleanups) {
1250   Arena arena;
1251   auto growth = internal::CleanupGrowth(
1252       arena, [&] { Arena::Create<RepeatedField<int>>(&arena); });
1253   EXPECT_THAT(growth.cleanups, testing::IsEmpty());
1254 
1255   void* ptr;
1256   growth = internal::CleanupGrowth(
1257       arena, [&] { ptr = Arena::Create<RepeatedField<absl::Cord>>(&arena); });
1258   EXPECT_THAT(growth.cleanups, testing::UnorderedElementsAre(ptr));
1259 }
1260 
1261 TEST(RepeatedField, InitialSooCapacity) {
1262   if (sizeof(void*) == 8) {
1263     EXPECT_EQ(RepeatedField<bool>().Capacity(), 3);
1264     EXPECT_EQ(RepeatedField<int32_t>().Capacity(), 2);
1265     EXPECT_EQ(RepeatedField<int64_t>().Capacity(), 1);
1266     EXPECT_EQ(RepeatedField<absl::Cord>().Capacity(), 0);
1267   } else {
1268     EXPECT_EQ(RepeatedField<bool>().Capacity(), 0);
1269     EXPECT_EQ(RepeatedField<int32_t>().Capacity(), 0);
1270     EXPECT_EQ(RepeatedField<int64_t>().Capacity(), 0);
1271     EXPECT_EQ(RepeatedField<absl::Cord>().Capacity(), 0);
1272   }
1273 }
1274 
1275 // ===================================================================
1276 
1277 // Iterator tests stolen from net/proto/proto-array_unittest.
1278 
1279 class RepeatedFieldIteratorTest : public testing::Test {
1280  protected:
1281   void SetUp() override {
1282     for (int i = 0; i < 3; ++i) {
1283       proto_array_.Add(i);
1284     }
1285   }
1286 
1287   RepeatedField<int> proto_array_;
1288 };
1289 
1290 TEST_F(RepeatedFieldIteratorTest, Convertible) {
1291   RepeatedField<int>::iterator iter = proto_array_.begin();
1292   RepeatedField<int>::const_iterator c_iter = iter;
1293   RepeatedField<int>::value_type value = *c_iter;
1294   EXPECT_EQ(0, value);
1295 }
1296 
1297 TEST_F(RepeatedFieldIteratorTest, MutableIteration) {
1298   RepeatedField<int>::iterator iter = proto_array_.begin();
1299   EXPECT_EQ(0, *iter);
1300   ++iter;
1301   EXPECT_EQ(1, *iter++);
1302   EXPECT_EQ(2, *iter);
1303   ++iter;
1304   EXPECT_TRUE(proto_array_.end() == iter);
1305 
1306   EXPECT_EQ(2, *(proto_array_.end() - 1));
1307 }
1308 
1309 TEST_F(RepeatedFieldIteratorTest, ConstIteration) {
1310   const RepeatedField<int>& const_proto_array = proto_array_;
1311   RepeatedField<int>::const_iterator iter = const_proto_array.begin();
1312   EXPECT_EQ(0, *iter);
1313   ++iter;
1314   EXPECT_EQ(1, *iter++);
1315   EXPECT_EQ(2, *iter);
1316   ++iter;
1317   EXPECT_TRUE(const_proto_array.end() == iter);
1318   EXPECT_EQ(2, *(const_proto_array.end() - 1));
1319 }
1320 
1321 TEST_F(RepeatedFieldIteratorTest, Mutation) {
1322   RepeatedField<int>::iterator iter = proto_array_.begin();
1323   *iter = 7;
1324   EXPECT_EQ(7, proto_array_.Get(0));
1325 }
1326 
1327 // -----------------------------------------------------------------------------
1328 // Unit-tests for the insert iterators
1329 // `google::protobuf::RepeatedFieldBackInserter`,
1330 // `google::protobuf::AllocatedRepeatedFieldBackInserter`
1331 // Ported from util/gtl/proto-array-iterators_unittest.
1332 
1333 class RepeatedFieldInsertionIteratorsTest : public testing::Test {
1334  protected:
1335   std::list<double> halves;
1336   std::list<int> fibonacci;
1337   TestAllTypes protobuffer;
1338 
1339   void SetUp() override {
1340     fibonacci.push_back(1);
1341     fibonacci.push_back(1);
1342     fibonacci.push_back(2);
1343     fibonacci.push_back(3);
1344     fibonacci.push_back(5);
1345     fibonacci.push_back(8);
1346     std::copy(fibonacci.begin(), fibonacci.end(),
1347               RepeatedFieldBackInserter(protobuffer.mutable_repeated_int32()));
1348 
1349     halves.push_back(1.0);
1350     halves.push_back(0.5);
1351     halves.push_back(0.25);
1352     halves.push_back(0.125);
1353     halves.push_back(0.0625);
1354     std::copy(halves.begin(), halves.end(),
1355               RepeatedFieldBackInserter(protobuffer.mutable_repeated_double()));
1356   }
1357 };
1358 
1359 TEST_F(RepeatedFieldInsertionIteratorsTest, Fibonacci) {
1360   EXPECT_TRUE(std::equal(fibonacci.begin(), fibonacci.end(),
1361                          protobuffer.repeated_int32().begin()));
1362   EXPECT_TRUE(std::equal(protobuffer.repeated_int32().begin(),
1363                          protobuffer.repeated_int32().end(),
1364                          fibonacci.begin()));
1365 }
1366 
1367 TEST_F(RepeatedFieldInsertionIteratorsTest, Halves) {
1368   EXPECT_TRUE(std::equal(halves.begin(), halves.end(),
1369                          protobuffer.repeated_double().begin()));
1370   EXPECT_TRUE(std::equal(protobuffer.repeated_double().begin(),
1371                          protobuffer.repeated_double().end(), halves.begin()));
1372 }
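
// Minimal standalone sketch (editor addition) of RepeatedFieldBackInserter,
// using only the std::copy pattern from SetUp() above: the inserter appends
// each copied element to the target RepeatedField.
TEST_F(RepeatedFieldInsertionIteratorsTest, BackInserterSketch) {
  RepeatedField<int> target;
  std::copy(fibonacci.begin(), fibonacci.end(),
            RepeatedFieldBackInserter(&target));
  EXPECT_TRUE(std::equal(fibonacci.begin(), fibonacci.end(), target.begin()));
}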
1373 
1374 }  // namespace
1375 
1376 }  // namespace protobuf
1377 }  // namespace google
1378 
1379 #include "google/protobuf/port_undef.inc"
1380