// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "absl/container/flat_hash_map.h"

#include <cstddef>
#include <memory>
#include <type_traits>
#include <utility>
#include <vector>

#include "gtest/gtest.h"
#include "absl/container/internal/hash_generator_testing.h"
#include "absl/container/internal/unordered_map_constructor_test.h"
#include "absl/container/internal/unordered_map_lookup_test.h"
#include "absl/container/internal/unordered_map_members_test.h"
#include "absl/container/internal/unordered_map_modifiers_test.h"
#include "absl/log/check.h"
#include "absl/meta/type_traits.h"
#include "absl/types/any.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
namespace {
using ::absl::container_internal::hash_internal::Enum;
using ::absl::container_internal::hash_internal::EnumClass;
using ::testing::_;
using ::testing::IsEmpty;
using ::testing::Pair;
using ::testing::UnorderedElementsAre;

// Check that absl::flat_hash_map works in a global constructor.
struct BeforeMain {
  BeforeMain() {
    absl::flat_hash_map<int, int> x;
    x.insert({1, 1});
    CHECK(x.find(0) == x.end()) << "x should not contain 0";
    auto it = x.find(1);
    CHECK(it != x.end()) << "x should contain 1";
    CHECK(it->second) << "1 should map to 1";
  }
};
const BeforeMain before_main;

template <class K, class V>
using Map = flat_hash_map<K, V, StatefulTestingHash, StatefulTestingEqual,
                          Alloc<std::pair<const K, V>>>;

static_assert(!std::is_standard_layout<NonStandardLayout>(), "");

using MapTypes =
    ::testing::Types<Map<int, int>, Map<std::string, int>,
                     Map<Enum, std::string>, Map<EnumClass, int>,
                     Map<int, NonStandardLayout>, Map<NonStandardLayout, int>>;

INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, ConstructorTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, LookupTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, MembersTest, MapTypes);
INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, ModifiersTest, MapTypes);

using UniquePtrMapTypes = ::testing::Types<Map<int, std::unique_ptr<int>>>;

INSTANTIATE_TYPED_TEST_SUITE_P(FlatHashMap, UniquePtrModifiersTest,
                               UniquePtrMapTypes);

TEST(FlatHashMap, StandardLayout) {
  struct Int {
    explicit Int(size_t value) : value(value) {}
    Int() : value(0) { ADD_FAILURE(); }
    Int(const Int& other) : value(other.value) { ADD_FAILURE(); }
    Int(Int&&) = default;
    bool operator==(const Int& other) const { return value == other.value; }
    size_t value;
  };
  static_assert(std::is_standard_layout<Int>(), "");

  struct Hash {
    size_t operator()(const Int& obj) const { return obj.value; }
  };

  // Verify that neither the key nor the value get default-constructed or
  // copy-constructed.
  {
    flat_hash_map<Int, Int, Hash> m;
    m.try_emplace(Int(1), Int(2));
    m.try_emplace(Int(3), Int(4));
    m.erase(Int(1));
    m.rehash(2 * m.bucket_count());
  }
  {
    flat_hash_map<Int, Int, Hash> m;
    m.try_emplace(Int(1), Int(2));
    m.try_emplace(Int(3), Int(4));
    m.erase(Int(1));
    m.clear();
  }
}

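// FlatHashMapPolicy::transfer's return type is asserted below: std::true_type
// for a trivially relocatable std::pair<const int, int> slot, and
// std::false_type once the mapped type (NonRelocatable here) defines
// nontrivial move operations.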
TEST(FlatHashMap, Relocatability) {
  static_assert(absl::is_trivially_relocatable<int>::value, "");
  static_assert(
      absl::is_trivially_relocatable<std::pair<const int, int>>::value, "");
  static_assert(
      std::is_same<decltype(absl::container_internal::FlatHashMapPolicy<
                            int, int>::transfer<std::allocator<char>>(nullptr,
                                                                      nullptr,
                                                                      nullptr)),
                   std::true_type>::value,
      "");

  struct NonRelocatable {
    NonRelocatable() = default;
    NonRelocatable(NonRelocatable&&) {}
    NonRelocatable& operator=(NonRelocatable&&) { return *this; }
    void* self = nullptr;
  };

  EXPECT_FALSE(absl::is_trivially_relocatable<NonRelocatable>::value);
  EXPECT_TRUE(
      (std::is_same<decltype(absl::container_internal::FlatHashMapPolicy<
                             int, NonRelocatable>::
                                 transfer<std::allocator<char>>(
                                     nullptr, nullptr, nullptr)),
                    std::false_type>::value));
}

// gcc becomes unhappy if this is inside the method, so pull it out here.
struct balast {};

TEST(FlatHashMap, IteratesMsan) {
  // Because SwissTable randomizes on pointer addresses, we keep old tables
  // around to ensure we don't reuse old memory.
  std::vector<absl::flat_hash_map<int, balast>> garbage;
  for (int i = 0; i < 100; ++i) {
    absl::flat_hash_map<int, balast> t;
    for (int j = 0; j < 100; ++j) {
      t[j];
      for (const auto& p : t) EXPECT_THAT(p, Pair(_, _));
    }
    garbage.push_back(std::move(t));
  }
}

// Demonstration of the "Lazy Key" pattern.  This uses heterogeneous insert to
// avoid creating expensive key elements when the item is already present in
// the map.
struct LazyInt {
  explicit LazyInt(size_t value, int* tracker)
      : value(value), tracker(tracker) {}

  explicit operator size_t() const {
    ++*tracker;
    return value;
  }

  size_t value;
  int* tracker;
};

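// Hash and Eq are transparent (note is_transparent): they can hash and compare
// both size_t and LazyInt keys, which is what lets the operations below avoid
// converting a LazyInt to size_t unless a new element actually has to be
// stored.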
struct Hash {
  using is_transparent = void;
  int* tracker;
  size_t operator()(size_t obj) const {
    ++*tracker;
    return obj;
  }
  size_t operator()(const LazyInt& obj) const {
    ++*tracker;
    return obj.value;
  }
};

struct Eq {
  using is_transparent = void;
  bool operator()(size_t lhs, size_t rhs) const { return lhs == rhs; }
  bool operator()(size_t lhs, const LazyInt& rhs) const {
    return lhs == rhs.value;
  }
};

TEST(FlatHashMap, LazyKeyPattern) {
  // Exact hash counts are only guaranteed in opt mode; in debug builds,
  // internal assertions can trigger extra calls to hash, so the hash-count
  // checks below are guarded by NDEBUG.
  int conversions = 0;
  int hashes = 0;
  flat_hash_map<size_t, size_t, Hash, Eq> m(0, Hash{&hashes});
  m.reserve(3);

  m[LazyInt(1, &conversions)] = 1;
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 1)));
  EXPECT_EQ(conversions, 1);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 1);
#endif

  m[LazyInt(1, &conversions)] = 2;
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2)));
  EXPECT_EQ(conversions, 1);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 2);
#endif

  m.try_emplace(LazyInt(2, &conversions), 3);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2), Pair(2, 3)));
  EXPECT_EQ(conversions, 2);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 3);
#endif

  m.try_emplace(LazyInt(2, &conversions), 4);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 2), Pair(2, 3)));
  EXPECT_EQ(conversions, 2);
#ifdef NDEBUG
  EXPECT_EQ(hashes, 4);
#endif
}

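// A bit-field cannot bind to a non-const reference, so this test simply checks
// that every lookup and insertion entry point still accepts one; it passes as
// long as it compiles and runs.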
TEST(FlatHashMap, BitfieldArgument) {
  union {
    int n : 1;
  };
  n = 0;
  flat_hash_map<int, int> m;
  m.erase(n);
  m.count(n);
  m.prefetch(n);
  m.find(n);
  m.contains(n);
  m.equal_range(n);
  m.insert_or_assign(n, n);
  m.insert_or_assign(m.end(), n, n);
  m.try_emplace(n);
  m.try_emplace(m.end(), n);
  m.at(n);
  m[n];
}

TEST(FlatHashMap, MergeExtractInsert) {
  // We can't test mutable keys, or non-copyable keys with flat_hash_map.
  // Test that the nodes have the proper API.
  absl::flat_hash_map<int, int> m = {{1, 7}, {2, 9}};
  auto node = m.extract(1);
  EXPECT_TRUE(node);
  EXPECT_EQ(node.key(), 1);
  EXPECT_EQ(node.mapped(), 7);
  EXPECT_THAT(m, UnorderedElementsAre(Pair(2, 9)));

  node.mapped() = 17;
  m.insert(std::move(node));
  EXPECT_THAT(m, UnorderedElementsAre(Pair(1, 17), Pair(2, 9)));
}

bool FirstIsEven(std::pair<const int, int> p) { return p.first % 2 == 0; }

TEST(FlatHashMap, EraseIf) {
  // Erase all elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return true; }), 5);
    EXPECT_THAT(s, IsEmpty());
  }
  // Erase no elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return false; }), 0);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3),
                                        Pair(4, 4), Pair(5, 5)));
  }
  // Erase specific elements.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    EXPECT_EQ(erase_if(s,
                       [](std::pair<const int, int> kvp) {
                         return kvp.first % 2 == 1;
                       }),
              3);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(2, 2), Pair(4, 4)));
  }
  // Predicate is function reference.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    EXPECT_EQ(erase_if(s, FirstIsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
  // Predicate is function pointer.
  {
    flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    EXPECT_EQ(erase_if(s, &FirstIsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
}

// This test requires std::launder for mutable key access in node handles.
#if defined(__cpp_lib_launder) && __cpp_lib_launder >= 201606
TEST(FlatHashMap, NodeHandleMutableKeyAccess) {
  flat_hash_map<std::string, std::string> map;

  map["key1"] = "mapped";

  auto nh = map.extract(map.begin());
  nh.key().resize(3);
  map.insert(std::move(nh));

  EXPECT_THAT(map, testing::ElementsAre(Pair("key", "mapped")));
}
#endif

TEST(FlatHashMap, Reserve) {
  // Verify that if we reserve(size() + n) then we can perform n insertions
  // without a rehash, i.e., without invalidating any references.
  for (size_t trial = 0; trial < 20; ++trial) {
    for (size_t initial = 3; initial < 100; ++initial) {
      // Fill in `initial` entries, then erase 2 of them, then reserve space for
      // two inserts and check for reference stability while doing the inserts.
      flat_hash_map<size_t, size_t> map;
      for (size_t i = 0; i < initial; ++i) {
        map[i] = i;
      }
      map.erase(0);
      map.erase(1);
      map.reserve(map.size() + 2);
      size_t& a2 = map[2];
      // In the event of a failure, asan will complain in one of these two
      // assignments.
      map[initial] = a2;
      map[initial + 1] = a2;
      // Fail even when not under asan:
      size_t& a2new = map[2];
      EXPECT_EQ(&a2, &a2new);
    }
  }
}

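// RecursiveType holds a map of itself (an incomplete type at the point of the
// member declaration); this test only checks that such a definition compiles
// and can be used.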
TEST(FlatHashMap, RecursiveTypeCompiles) {
  struct RecursiveType {
    flat_hash_map<int, RecursiveType> m;
  };
  RecursiveType t;
  t.m[0] = RecursiveType{};
}

}  // namespace
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl