// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef GOOGLE_PROTOBUF_MAP_H__
#define GOOGLE_PROTOBUF_MAP_H__

#include <google/protobuf/stubs/hash.h>
#include <iterator>
#include <limits>  // To support Visual Studio 2008
#include <set>
#include <utility>

#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/generated_enum_util.h>
#include <google/protobuf/map_type_handler.h>
#include <google/protobuf/message.h>
#include <google/protobuf/descriptor.h>
#if __cpp_exceptions && LANG_CXX11
#include <random>
#endif

namespace google {
namespace protobuf {

// The Map and MapIterator types are provided by this header file.
// Please avoid using other types defined here, unless they are public
// types within Map or MapIterator, such as Map::value_type.
template <typename Key, typename T>
class Map;

class MapIterator;

template <typename Enum> struct is_proto_enum;

namespace internal {
template <typename Key, typename T,
          WireFormatLite::FieldType key_wire_type,
          WireFormatLite::FieldType value_wire_type,
          int default_enum_value>
class MapFieldLite;

template <typename Key, typename T,
          WireFormatLite::FieldType key_wire_type,
          WireFormatLite::FieldType value_wire_type,
          int default_enum_value>
class MapField;

template <typename Key, typename T>
class TypeDefinedMapFieldBase;

class DynamicMapField;

class GeneratedMessageReflection;
}  // namespace internal

#define TYPE_CHECK(EXPECTEDTYPE, METHOD)                        \
  if (type() != EXPECTEDTYPE) {                                 \
    GOOGLE_LOG(FATAL)                                           \
        << "Protocol Buffer map usage error:\n"                 \
        << METHOD << " type does not match\n"                   \
        << "  Expected : "                                      \
        << FieldDescriptor::CppTypeName(EXPECTEDTYPE) << "\n"   \
        << "  Actual   : "                                      \
        << FieldDescriptor::CppTypeName(type());                \
  }

// MapKey is a union type for representing any possible
// map key.
class LIBPROTOBUF_EXPORT MapKey {
 public:
  MapKey() : type_(0) {
  }
  MapKey(const MapKey& other) : type_(0) {
    CopyFrom(other);
  }

  ~MapKey() {
    if (type_ == FieldDescriptor::CPPTYPE_STRING) {
      delete val_.string_value_;
    }
  }

  FieldDescriptor::CppType type() const {
    if (type_ == 0) {
      GOOGLE_LOG(FATAL)
          << "Protocol Buffer map usage error:\n"
          << "MapKey::type MapKey is not initialized. "
          << "Call set methods to initialize MapKey.";
    }
    return (FieldDescriptor::CppType)type_;
  }

  void SetInt64Value(int64 value) {
    SetType(FieldDescriptor::CPPTYPE_INT64);
    val_.int64_value_ = value;
  }
  void SetUInt64Value(uint64 value) {
    SetType(FieldDescriptor::CPPTYPE_UINT64);
    val_.uint64_value_ = value;
  }
  void SetInt32Value(int32 value) {
    SetType(FieldDescriptor::CPPTYPE_INT32);
    val_.int32_value_ = value;
  }
  void SetUInt32Value(uint32 value) {
    SetType(FieldDescriptor::CPPTYPE_UINT32);
    val_.uint32_value_ = value;
  }
  void SetBoolValue(bool value) {
    SetType(FieldDescriptor::CPPTYPE_BOOL);
    val_.bool_value_ = value;
  }
  void SetStringValue(const string& val) {
    SetType(FieldDescriptor::CPPTYPE_STRING);
    *val_.string_value_ = val;
  }

  int64 GetInt64Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT64,
               "MapKey::GetInt64Value");
    return val_.int64_value_;
  }
  uint64 GetUInt64Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT64,
               "MapKey::GetUInt64Value");
    return val_.uint64_value_;
  }
  int32 GetInt32Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT32,
               "MapKey::GetInt32Value");
    return val_.int32_value_;
  }
  uint32 GetUInt32Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT32,
               "MapKey::GetUInt32Value");
    return val_.uint32_value_;
  }
  bool GetBoolValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_BOOL,
               "MapKey::GetBoolValue");
    return val_.bool_value_;
  }
  const string& GetStringValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_STRING,
               "MapKey::GetStringValue");
    return *val_.string_value_;
  }

  bool operator<(const MapKey& other) const {
    if (type_ != other.type_) {
      // We could define a total order that handles this case, but
      // there is currently no need.  So, for now, fail.
      GOOGLE_LOG(FATAL) << "Unsupported: type mismatch";
    }
    switch (type()) {
      case FieldDescriptor::CPPTYPE_DOUBLE:
      case FieldDescriptor::CPPTYPE_FLOAT:
      case FieldDescriptor::CPPTYPE_ENUM:
      case FieldDescriptor::CPPTYPE_MESSAGE:
        GOOGLE_LOG(FATAL) << "Unsupported";
        return false;
      case FieldDescriptor::CPPTYPE_STRING:
        return *val_.string_value_ < *other.val_.string_value_;
      case FieldDescriptor::CPPTYPE_INT64:
        return val_.int64_value_ < other.val_.int64_value_;
      case FieldDescriptor::CPPTYPE_INT32:
        return val_.int32_value_ < other.val_.int32_value_;
      case FieldDescriptor::CPPTYPE_UINT64:
        return val_.uint64_value_ < other.val_.uint64_value_;
      case FieldDescriptor::CPPTYPE_UINT32:
        return val_.uint32_value_ < other.val_.uint32_value_;
      case FieldDescriptor::CPPTYPE_BOOL:
        return val_.bool_value_ < other.val_.bool_value_;
    }
    return false;
  }

  bool operator==(const MapKey& other) const {
    if (type_ != other.type_) {
      // To be consistent with operator<, we don't allow this either.
      GOOGLE_LOG(FATAL) << "Unsupported: type mismatch";
    }
    switch (type()) {
      case FieldDescriptor::CPPTYPE_DOUBLE:
      case FieldDescriptor::CPPTYPE_FLOAT:
      case FieldDescriptor::CPPTYPE_ENUM:
      case FieldDescriptor::CPPTYPE_MESSAGE:
        GOOGLE_LOG(FATAL) << "Unsupported";
        break;
      case FieldDescriptor::CPPTYPE_STRING:
        return *val_.string_value_ == *other.val_.string_value_;
      case FieldDescriptor::CPPTYPE_INT64:
        return val_.int64_value_ == other.val_.int64_value_;
      case FieldDescriptor::CPPTYPE_INT32:
        return val_.int32_value_ == other.val_.int32_value_;
      case FieldDescriptor::CPPTYPE_UINT64:
        return val_.uint64_value_ == other.val_.uint64_value_;
      case FieldDescriptor::CPPTYPE_UINT32:
        return val_.uint32_value_ == other.val_.uint32_value_;
      case FieldDescriptor::CPPTYPE_BOOL:
        return val_.bool_value_ == other.val_.bool_value_;
    }
    GOOGLE_LOG(FATAL) << "Can't get here.";
    return false;
  }

  void CopyFrom(const MapKey& other) {
    SetType(other.type());
    switch (type_) {
      case FieldDescriptor::CPPTYPE_DOUBLE:
      case FieldDescriptor::CPPTYPE_FLOAT:
      case FieldDescriptor::CPPTYPE_ENUM:
      case FieldDescriptor::CPPTYPE_MESSAGE:
        GOOGLE_LOG(FATAL) << "Unsupported";
        break;
      case FieldDescriptor::CPPTYPE_STRING:
        *val_.string_value_ = *other.val_.string_value_;
        break;
      case FieldDescriptor::CPPTYPE_INT64:
        val_.int64_value_ = other.val_.int64_value_;
        break;
      case FieldDescriptor::CPPTYPE_INT32:
        val_.int32_value_ = other.val_.int32_value_;
        break;
      case FieldDescriptor::CPPTYPE_UINT64:
        val_.uint64_value_ = other.val_.uint64_value_;
        break;
      case FieldDescriptor::CPPTYPE_UINT32:
        val_.uint32_value_ = other.val_.uint32_value_;
        break;
      case FieldDescriptor::CPPTYPE_BOOL:
        val_.bool_value_ = other.val_.bool_value_;
        break;
    }
  }

 private:
  template <typename K, typename V>
  friend class internal::TypeDefinedMapFieldBase;
  friend class MapIterator;
  friend class internal::DynamicMapField;

  union KeyValue {
    KeyValue() {}
    string* string_value_;
    int64 int64_value_;
    int32 int32_value_;
    uint64 uint64_value_;
    uint32 uint32_value_;
    bool bool_value_;
  } val_;

  void SetType(FieldDescriptor::CppType type) {
    if (type_ == type) return;
    if (type_ == FieldDescriptor::CPPTYPE_STRING) {
      delete val_.string_value_;
    }
    type_ = type;
    if (type_ == FieldDescriptor::CPPTYPE_STRING) {
      val_.string_value_ = new string;
    }
  }

  // type_ is 0 or a valid FieldDescriptor::CppType.
  int type_;
};
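
// A minimal usage sketch for MapKey (illustrative only): each setter fixes the
// stored CppType and each getter checks it, so a mismatched getter aborts
// instead of returning garbage.
//
//   MapKey key;
//   key.SetStringValue("alice");      // type() is now CPPTYPE_STRING
//   const string& s = key.GetStringValue();
//   key.SetInt32Value(7);             // switches the union to int32
//   // key.GetStringValue();          // would GOOGLE_LOG(FATAL): type mismatch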

// MapValueRef points to a map value.
class LIBPROTOBUF_EXPORT MapValueRef {
 public:
  MapValueRef() : data_(NULL), type_(0) {}

  void SetInt64Value(int64 value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT64,
               "MapValueRef::SetInt64Value");
    *reinterpret_cast<int64*>(data_) = value;
  }
  void SetUInt64Value(uint64 value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT64,
               "MapValueRef::SetUInt64Value");
    *reinterpret_cast<uint64*>(data_) = value;
  }
  void SetInt32Value(int32 value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT32,
               "MapValueRef::SetInt32Value");
    *reinterpret_cast<int32*>(data_) = value;
  }
  void SetUInt32Value(uint32 value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT32,
               "MapValueRef::SetUInt32Value");
    *reinterpret_cast<uint32*>(data_) = value;
  }
  void SetBoolValue(bool value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_BOOL,
               "MapValueRef::SetBoolValue");
    *reinterpret_cast<bool*>(data_) = value;
  }
  // TODO(jieluo) - Check that the value is a member of the enum.
  void SetEnumValue(int value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_ENUM,
               "MapValueRef::SetEnumValue");
    *reinterpret_cast<int*>(data_) = value;
  }
  void SetStringValue(const string& value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_STRING,
               "MapValueRef::SetStringValue");
    *reinterpret_cast<string*>(data_) = value;
  }
  void SetFloatValue(float value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_FLOAT,
               "MapValueRef::SetFloatValue");
    *reinterpret_cast<float*>(data_) = value;
  }
  void SetDoubleValue(double value) {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_DOUBLE,
               "MapValueRef::SetDoubleValue");
    *reinterpret_cast<double*>(data_) = value;
  }

  int64 GetInt64Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT64,
               "MapValueRef::GetInt64Value");
    return *reinterpret_cast<int64*>(data_);
  }
  uint64 GetUInt64Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT64,
               "MapValueRef::GetUInt64Value");
    return *reinterpret_cast<uint64*>(data_);
  }
  int32 GetInt32Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_INT32,
               "MapValueRef::GetInt32Value");
    return *reinterpret_cast<int32*>(data_);
  }
  uint32 GetUInt32Value() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_UINT32,
               "MapValueRef::GetUInt32Value");
    return *reinterpret_cast<uint32*>(data_);
  }
  bool GetBoolValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_BOOL,
               "MapValueRef::GetBoolValue");
    return *reinterpret_cast<bool*>(data_);
  }
  int GetEnumValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_ENUM,
               "MapValueRef::GetEnumValue");
    return *reinterpret_cast<int*>(data_);
  }
  const string& GetStringValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_STRING,
               "MapValueRef::GetStringValue");
    return *reinterpret_cast<string*>(data_);
  }
  float GetFloatValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_FLOAT,
               "MapValueRef::GetFloatValue");
    return *reinterpret_cast<float*>(data_);
  }
  double GetDoubleValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_DOUBLE,
               "MapValueRef::GetDoubleValue");
    return *reinterpret_cast<double*>(data_);
  }

  const Message& GetMessageValue() const {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_MESSAGE,
               "MapValueRef::GetMessageValue");
    return *reinterpret_cast<Message*>(data_);
  }

  Message* MutableMessageValue() {
    TYPE_CHECK(FieldDescriptor::CPPTYPE_MESSAGE,
               "MapValueRef::MutableMessageValue");
    return reinterpret_cast<Message*>(data_);
  }

 private:
  template <typename K, typename V,
            internal::WireFormatLite::FieldType key_wire_type,
            internal::WireFormatLite::FieldType value_wire_type,
            int default_enum_value>
  friend class internal::MapField;
  template <typename K, typename V>
  friend class internal::TypeDefinedMapFieldBase;
  friend class MapIterator;
  friend class internal::GeneratedMessageReflection;
  friend class internal::DynamicMapField;

  void SetType(FieldDescriptor::CppType type) {
    type_ = type;
  }

  FieldDescriptor::CppType type() const {
    if (type_ == 0 || data_ == NULL) {
      GOOGLE_LOG(FATAL)
          << "Protocol Buffer map usage error:\n"
          << "MapValueRef::type MapValueRef is not initialized.";
    }
    return (FieldDescriptor::CppType)type_;
  }
  void SetValue(const void* val) {
    data_ = const_cast<void*>(val);
  }
  void CopyFrom(const MapValueRef& other) {
    type_ = other.type_;
    data_ = other.data_;
  }
  // Only used in DynamicMapField.
  void DeleteData() {
    switch (type_) {
#define HANDLE_TYPE(CPPTYPE, TYPE)                              \
      case google::protobuf::FieldDescriptor::CPPTYPE_##CPPTYPE: {        \
        delete reinterpret_cast<TYPE*>(data_);                  \
        break;                                                  \
      }
      HANDLE_TYPE(INT32, int32);
      HANDLE_TYPE(INT64, int64);
      HANDLE_TYPE(UINT32, uint32);
      HANDLE_TYPE(UINT64, uint64);
      HANDLE_TYPE(DOUBLE, double);
      HANDLE_TYPE(FLOAT, float);
      HANDLE_TYPE(BOOL, bool);
      HANDLE_TYPE(STRING, string);
      HANDLE_TYPE(ENUM, int32);
      HANDLE_TYPE(MESSAGE, Message);
#undef HANDLE_TYPE
    }
  }
  // data_ points to a map value.  MapValueRef does not
  // own this value.
  void* data_;
  // type_ is 0 or a valid FieldDescriptor::CppType.
  int type_;
};
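
// Illustrative sketch: the map field internals bind a MapValueRef to a value
// slot through the private SetType()/SetValue(); after that, callers use the
// typed accessors, and an accessor of the wrong type aborts just as in MapKey.
//
//   value_ref.SetInt32Value(42);
//   int32 v = value_ref.GetInt32Value();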

#undef TYPE_CHECK

// This is the class for google::protobuf::Map's internal value_type.  Instead of
// using std::pair as value_type, we use this class, which gives us more
// control over its construction and destruction.
template <typename Key, typename T>
class MapPair {
 public:
  typedef const Key first_type;
  typedef T second_type;

  MapPair(const Key& other_first, const T& other_second)
      : first(other_first), second(other_second) {}
  explicit MapPair(const Key& other_first) : first(other_first), second() {}
  MapPair(const MapPair& other)
      : first(other.first), second(other.second) {}

  ~MapPair() {}

  // Implicitly convertible to std::pair of compatible types.
  template <typename T1, typename T2>
  operator std::pair<T1, T2>() const {
    return std::pair<T1, T2>(first, second);
  }

  const Key first;
  T second;

 private:
  friend class ::google::protobuf::Arena;
  friend class Map<Key, T>;
};
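
// For illustration (the type arguments here are arbitrary): MapPair mirrors
// std::pair and converts to it implicitly, so code written against std::pair
// keeps working.
//
//   MapPair<string, int32> entry("x", 1);
//   std::pair<string, int32> p = entry;  // uses operator std::pair<T1, T2>()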

// google::protobuf::Map is an associative container type used to store protobuf map
// fields.  Each Map instance may use a different hash function, a different
// iteration order, and so on, so don't rely on two instances behaving the
// same way.  E.g., please don't examine implementation details to decide if
// the following would work:
//  Map<int, int> m0, m1;
//  m0[0] = m1[0] = m0[1] = m1[1] = 0;
//  assert(m0.begin()->first == m1.begin()->first);  // Bug!
//
// Map's interface is similar to std::unordered_map, except that Map is not
// designed to play well with exceptions.
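//
// For example, typical use looks roughly like this (the accessor
// mutable_my_map_field() is a hypothetical generated method):
//
//   Map<int32, string>& m = *msg.mutable_my_map_field();
//   m[42] = "hello";                               // insert or overwrite
//   Map<int32, string>::iterator it = m.find(42);  // lookup, like unordered_map
//   if (it != m.end()) {
//     // use it->first and it->second
//   }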
template <typename Key, typename T>
class Map {
 public:
  typedef Key key_type;
  typedef T mapped_type;
  typedef MapPair<Key, T> value_type;

  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef value_type& reference;
  typedef const value_type& const_reference;

  typedef size_t size_type;
  typedef hash<Key> hasher;

  explicit Map(bool old_style = true)
      : arena_(NULL),
        default_enum_value_(0),
        old_style_(old_style) {
    Init();
  }
  explicit Map(Arena* arena, bool old_style = true)
      : arena_(arena),
        default_enum_value_(0),
        old_style_(old_style) {
    Init();
  }
  Map(const Map& other)
      : arena_(NULL),
        default_enum_value_(other.default_enum_value_),
        old_style_(other.old_style_) {
    Init();
    insert(other.begin(), other.end());
  }
  template <class InputIt>
  Map(const InputIt& first, const InputIt& last, bool old_style = true)
      : arena_(NULL),
        default_enum_value_(0),
        old_style_(old_style) {
    Init();
    insert(first, last);
  }

  ~Map() {
    clear();
    if (arena_ == NULL) {
      if (old_style_)
        delete deprecated_elements_;
      else
        delete elements_;
    }
  }

 private:
  void Init() {
    if (old_style_)
      deprecated_elements_ = Arena::Create<DeprecatedInnerMap>(
          arena_, 0, hasher(), equal_to<Key>(),
          MapAllocator<std::pair<const Key, MapPair<Key, T>*> >(arena_));
    else
      elements_ =
          Arena::Create<InnerMap>(arena_, 0, hasher(), Allocator(arena_));
  }

  // Re-implements std::allocator so that the arena allocator can be used for
  // memory allocation.  Used by the google::protobuf::Map implementation; users
  // should not use this class directly.
  template <typename U>
  class MapAllocator {
   public:
    typedef U value_type;
    typedef value_type* pointer;
    typedef const value_type* const_pointer;
    typedef value_type& reference;
    typedef const value_type& const_reference;
    typedef size_t size_type;
    typedef ptrdiff_t difference_type;

    MapAllocator() : arena_(NULL) {}
    explicit MapAllocator(Arena* arena) : arena_(arena) {}
    template <typename X>
    MapAllocator(const MapAllocator<X>& allocator)
        : arena_(allocator.arena()) {}

    pointer allocate(size_type n, const_pointer hint = 0) {
      // If no arena is given, malloc is used, which does not construct the
      // element objects.
      if (arena_ == NULL) {
        return reinterpret_cast<pointer>(malloc(n * sizeof(value_type)));
      } else {
        return reinterpret_cast<pointer>(
            Arena::CreateArray<uint8>(arena_, n * sizeof(value_type)));
      }
    }

    void deallocate(pointer p, size_type n) {
      if (arena_ == NULL) {
        free(p);
      }
    }

#if __cplusplus >= 201103L && !defined(GOOGLE_PROTOBUF_OS_APPLE) && \
    !defined(GOOGLE_PROTOBUF_OS_NACL) &&                            \
    !defined(GOOGLE_PROTOBUF_OS_ANDROID) &&                         \
    !defined(GOOGLE_PROTOBUF_OS_EMSCRIPTEN)
    template<class NodeType, class... Args>
    void construct(NodeType* p, Args&&... args) {
      // Clang 3.6 doesn't compile a static_cast to void* directly (issue
      // #1266).  According to the C++ standard 5.2.9/1, "The static_cast
      // operator shall not cast away constness", so the maybe-const pointer
      // is first cast to const void* and the const is then cast away.
      new (const_cast<void*>(static_cast<const void*>(p)))
          NodeType(std::forward<Args>(args)...);
    }

    template<class NodeType>
    void destroy(NodeType* p) {
      p->~NodeType();
    }
#else
    void construct(pointer p, const_reference t) { new (p) value_type(t); }

    void destroy(pointer p) { p->~value_type(); }
#endif

    template <typename X>
    struct rebind {
      typedef MapAllocator<X> other;
    };

    template <typename X>
    bool operator==(const MapAllocator<X>& other) const {
      return arena_ == other.arena_;
    }

    template <typename X>
    bool operator!=(const MapAllocator<X>& other) const {
      return arena_ != other.arena_;
    }

    // To support Visual Studio 2008
    size_type max_size() const {
      return std::numeric_limits<size_type>::max();
    }

    // To support gcc-4.4, which does not properly
    // support templated friend classes
    Arena* arena() const {
      return arena_;
    }

   private:
    typedef void DestructorSkippable_;
    Arena* const arena_;
  };
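
  // Illustrative note on MapAllocator (the example type argument is
  // arbitrary): with no arena it degenerates to malloc/free; with an arena,
  // memory comes from Arena::CreateArray<uint8> and deallocate() is a no-op,
  // since the arena reclaims everything when it is destroyed.
  //
  //   MapAllocator<int> heap_alloc;          // arena() == NULL
  //   MapAllocator<int> arena_alloc(arena);  // allocations live on the arena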

  // InnerMap's key type is Key and its value type is value_type*.  We use a
  // custom class here and for Node, below, to ensure that k_ is at offset 0,
  // allowing safe conversion from pointer to Node to pointer to Key, and vice
  // versa when appropriate.
  class KeyValuePair {
   public:
    KeyValuePair(const Key& k, value_type* v) : k_(k), v_(v) {}

    const Key& key() const { return k_; }
    Key& key() { return k_; }
    value_type* const value() const { return v_; }
    value_type*& value() { return v_; }

   private:
    Key k_;
    value_type* v_;
  };

  typedef MapAllocator<KeyValuePair> Allocator;

  // InnerMap is a generic hash-based map.  It doesn't contain any
  // protocol-buffer-specific logic.  It is a chaining hash map with the
  // additional feature that some buckets can be converted to use an ordered
  // container.  This ensures O(lg n) bounds on find, insert, and erase, while
  // avoiding the overheads of ordered containers most of the time.
  //
  // The implementation doesn't need the full generality of unordered_map,
  // and it doesn't have it.  More bells and whistles can be added as needed.
  // Some implementation details:
  // 1. The hash function has type hasher and the equality function
  //    equal_to<Key>.  We inherit from hasher to save space
  //    (empty-base-class optimization).
  // 2. The number of buckets is a power of two.
  // 3. Buckets are converted to trees in pairs: if we convert bucket b then
  //    buckets b and b^1 will share a tree.  Invariant: buckets b and b^1 have
  //    the same non-NULL value iff they are sharing a tree.  (An alternative
  //    implementation strategy would be to have a tag bit per bucket.)
  // 4. As is typical for hash_map and such, the Keys and Values are always
  //    stored in linked list nodes.  Pointers to elements are never invalidated
  //    until the element is deleted.
  // 5. The trees' payload type is pointer to linked-list node.  Tree-converting
  //    a bucket doesn't copy Key-Value pairs.
  // 6. Once we've tree-converted a bucket, it is never converted back. However,
  //    the items a tree contains may wind up assigned to trees or lists upon a
  //    rehash.
  // 7. The code requires no C++ features from C++11 or later.
  // 8. Mutations to a map do not invalidate the map's iterators, pointers to
  //    elements, or references to elements.
  // 9. Except for erase(iterator), any non-const method can reorder iterators.
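  //
  // Concretely, for point 3: if the list in bucket 4 grows too long it is
  // converted to a tree, and table_[4] and table_[5] then point to the same
  // Tree.  An empty entry is NULL, a list entry differs from its partner
  // bucket, and a tree entry is recognized precisely because it is non-NULL
  // and equal to its partner (see the TableEntryIs*() helpers below).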
  class InnerMap : private hasher {
   public:
    typedef value_type* Value;

    InnerMap(size_type n, hasher h, Allocator alloc)
        : hasher(h),
          num_elements_(0),
          seed_(Seed()),
          table_(NULL),
          alloc_(alloc) {
      n = TableSize(n);
      table_ = CreateEmptyTable(n);
      num_buckets_ = index_of_first_non_null_ = n;
    }

    ~InnerMap() {
      if (table_ != NULL) {
        clear();
        Dealloc<void*>(table_, num_buckets_);
      }
    }

   private:
    enum { kMinTableSize = 8 };

    // Linked-list nodes, as one would expect for a chaining hash table.
    struct Node {
      KeyValuePair kv;
      Node* next;
    };

    // This is safe only if the given pointer is known to point to a Key that is
    // part of a Node.
    static Node* NodePtrFromKeyPtr(Key* k) {
      return reinterpret_cast<Node*>(k);
    }

    static Key* KeyPtrFromNodePtr(Node* node) { return &node->kv.key(); }

    // Trees.  The payload type is pointer to Key, so that we can query the tree
    // with Keys that are not in any particular data structure.  When we insert,
    // though, the pointer is always pointing to a Key that is inside a Node.
    struct KeyCompare {
      bool operator()(const Key* n0, const Key* n1) const { return *n0 < *n1; }
    };
    typedef typename Allocator::template rebind<Key*>::other KeyPtrAllocator;
    typedef std::set<Key*, KeyCompare, KeyPtrAllocator> Tree;

    // iterator and const_iterator are instantiations of iterator_base.
    template <typename KeyValueType>
    class iterator_base {
     public:
      typedef KeyValueType& reference;
      typedef KeyValueType* pointer;
      typedef typename Tree::iterator TreeIterator;

      // Invariants:
      // node_ is always correct. This is handy because the most common
      // operations are operator* and operator-> and they only use node_.
      // When node_ is set to a non-NULL value, all the other non-const fields
      // are updated to be correct also, but those fields can become stale
      // if the underlying map is modified.  When those fields are needed they
      // are rechecked, and updated if necessary.
      iterator_base() : node_(NULL) {}

      explicit iterator_base(const InnerMap* m) : m_(m) {
        SearchFrom(m->index_of_first_non_null_);
      }

      // Any iterator_base can convert to any other.  This is overkill, and we
      // rely on the enclosing class to use it wisely.  The standard "iterator
      // can convert to const_iterator" is OK but the reverse direction is not.
      template <typename U>
      explicit iterator_base(const iterator_base<U>& it)
          : node_(it.node_),
            m_(it.m_),
            bucket_index_(it.bucket_index_),
            tree_it_(it.tree_it_) {}

      iterator_base(Node* n, const InnerMap* m, size_type index)
          : node_(n),
            m_(m),
            bucket_index_(index) {}

      iterator_base(TreeIterator tree_it, const InnerMap* m, size_type index)
          : node_(NodePtrFromKeyPtr(*tree_it)),
            m_(m),
            bucket_index_(index),
            tree_it_(tree_it) {
        // Invariant: iterators that use tree_it_ have an even bucket_index_.
        GOOGLE_DCHECK_EQ(bucket_index_ % 2, 0);
      }

      // Advance through buckets, looking for the first that isn't empty.
      // If nothing non-empty is found then leave node_ == NULL.
      void SearchFrom(size_type start_bucket) {
        GOOGLE_DCHECK(m_->index_of_first_non_null_ == m_->num_buckets_ ||
               m_->table_[m_->index_of_first_non_null_] != NULL);
        node_ = NULL;
        for (bucket_index_ = start_bucket; bucket_index_ < m_->num_buckets_;
             bucket_index_++) {
          if (m_->TableEntryIsNonEmptyList(bucket_index_)) {
            node_ = static_cast<Node*>(m_->table_[bucket_index_]);
            break;
          } else if (m_->TableEntryIsTree(bucket_index_)) {
            Tree* tree = static_cast<Tree*>(m_->table_[bucket_index_]);
            GOOGLE_DCHECK(!tree->empty());
            tree_it_ = tree->begin();
            node_ = NodePtrFromKeyPtr(*tree_it_);
            break;
          }
        }
      }

      reference operator*() const { return node_->kv; }
      pointer operator->() const { return &(operator*()); }

      friend bool operator==(const iterator_base& a, const iterator_base& b) {
        return a.node_ == b.node_;
      }
      friend bool operator!=(const iterator_base& a, const iterator_base& b) {
        return a.node_ != b.node_;
      }

      iterator_base& operator++() {
        if (node_->next == NULL) {
          const bool is_list = revalidate_if_necessary();
          if (is_list) {
            SearchFrom(bucket_index_ + 1);
          } else {
            GOOGLE_DCHECK_EQ(bucket_index_ & 1, 0);
            Tree* tree = static_cast<Tree*>(m_->table_[bucket_index_]);
            if (++tree_it_ == tree->end()) {
              SearchFrom(bucket_index_ + 2);
            } else {
              node_ = NodePtrFromKeyPtr(*tree_it_);
            }
          }
        } else {
          node_ = node_->next;
        }
        return *this;
      }

      iterator_base operator++(int /* unused */) {
        iterator_base tmp = *this;
        ++*this;
        return tmp;
      }

      // Assumes node_ and m_ are correct and non-NULL, but other fields may be
      // stale.  Fix them as needed.  Then return true iff node_ points to a
      // Node in a list.
      bool revalidate_if_necessary() {
        GOOGLE_DCHECK(node_ != NULL && m_ != NULL);
        // Force bucket_index_ to be in range.
        bucket_index_ &= (m_->num_buckets_ - 1);
        // Common case: the bucket we think is relevant points to node_.
        if (m_->table_[bucket_index_] == static_cast<void*>(node_))
          return true;
        // Less common: the bucket is a linked list with node_ somewhere in it,
        // but not at the head.
        if (m_->TableEntryIsNonEmptyList(bucket_index_)) {
          Node* l = static_cast<Node*>(m_->table_[bucket_index_]);
          while ((l = l->next) != NULL) {
            if (l == node_) {
              return true;
            }
          }
        }
        // Well, bucket_index_ still might be correct, but probably
        // not.  Revalidate just to be sure.  This case is rare enough that we
        // don't worry about potential optimizations, such as having a custom
        // find-like method that compares Node* instead of const Key&.
        iterator_base i(m_->find(*KeyPtrFromNodePtr(node_)));
        bucket_index_ = i.bucket_index_;
        tree_it_ = i.tree_it_;
        return m_->TableEntryIsList(bucket_index_);
      }

      Node* node_;
      const InnerMap* m_;
      size_type bucket_index_;
      TreeIterator tree_it_;
    };

   public:
    typedef iterator_base<KeyValuePair> iterator;
    typedef iterator_base<const KeyValuePair> const_iterator;

    iterator begin() { return iterator(this); }
    iterator end() { return iterator(); }
    const_iterator begin() const { return const_iterator(this); }
    const_iterator end() const { return const_iterator(); }

    void clear() {
      for (size_type b = 0; b < num_buckets_; b++) {
        if (TableEntryIsNonEmptyList(b)) {
          Node* node = static_cast<Node*>(table_[b]);
          table_[b] = NULL;
          do {
            Node* next = node->next;
            DestroyNode(node);
            node = next;
          } while (node != NULL);
        } else if (TableEntryIsTree(b)) {
          Tree* tree = static_cast<Tree*>(table_[b]);
          GOOGLE_DCHECK(table_[b] == table_[b + 1] && (b & 1) == 0);
          table_[b] = table_[b + 1] = NULL;
          typename Tree::iterator tree_it = tree->begin();
          do {
            Node* node = NodePtrFromKeyPtr(*tree_it);
            typename Tree::iterator next = tree_it;
            ++next;
            tree->erase(tree_it);
            DestroyNode(node);
            tree_it = next;
          } while (tree_it != tree->end());
          DestroyTree(tree);
          b++;
        }
      }
      num_elements_ = 0;
      index_of_first_non_null_ = num_buckets_;
    }

    const hasher& hash_function() const { return *this; }

    static size_type max_size() {
      return static_cast<size_type>(1) << (sizeof(void**) >= 8 ? 60 : 28);
    }
    size_type size() const { return num_elements_; }
    bool empty() const { return size() == 0; }

    iterator find(const Key& k) { return iterator(FindHelper(k).first); }
    const_iterator find(const Key& k) const { return FindHelper(k).first; }

    // In traditional C++ style, this performs "insert if not present."
    std::pair<iterator, bool> insert(const KeyValuePair& kv) {
      std::pair<const_iterator, size_type> p = FindHelper(kv.key());
      // Case 1: key was already present.
      if (p.first.node_ != NULL)
        return std::make_pair(iterator(p.first), false);
      // Case 2: insert.
      if (ResizeIfLoadIsOutOfRange(num_elements_ + 1)) {
        p = FindHelper(kv.key());
      }
      const size_type b = p.second;  // bucket number
      Node* node = Alloc<Node>(1);
      alloc_.construct(&node->kv, kv);
      iterator result = InsertUnique(b, node);
      ++num_elements_;
      return std::make_pair(result, true);
    }

    // The same, but if an insertion is necessary then the value portion of the
    // inserted key-value pair is left uninitialized.
    std::pair<iterator, bool> insert(const Key& k) {
      std::pair<const_iterator, size_type> p = FindHelper(k);
      // Case 1: key was already present.
      if (p.first.node_ != NULL)
        return std::make_pair(iterator(p.first), false);
      // Case 2: insert.
      if (ResizeIfLoadIsOutOfRange(num_elements_ + 1)) {
        p = FindHelper(k);
      }
      const size_type b = p.second;  // bucket number
      Node* node = Alloc<Node>(1);
      typedef typename Allocator::template rebind<Key>::other KeyAllocator;
      KeyAllocator(alloc_).construct(&node->kv.key(), k);
      iterator result = InsertUnique(b, node);
      ++num_elements_;
      return std::make_pair(result, true);
    }

    Value& operator[](const Key& k) {
      KeyValuePair kv(k, Value());
      return insert(kv).first->value();
    }

    void erase(iterator it) {
      GOOGLE_DCHECK_EQ(it.m_, this);
      const bool is_list = it.revalidate_if_necessary();
      size_type b = it.bucket_index_;
      Node* const item = it.node_;
      if (is_list) {
        GOOGLE_DCHECK(TableEntryIsNonEmptyList(b));
        Node* head = static_cast<Node*>(table_[b]);
        head = EraseFromLinkedList(item, head);
        table_[b] = static_cast<void*>(head);
      } else {
        GOOGLE_DCHECK(TableEntryIsTree(b));
        Tree* tree = static_cast<Tree*>(table_[b]);
        tree->erase(it.tree_it_);
        if (tree->empty()) {
          // Force b to be the minimum of b and b ^ 1.  This is important
          // only because we want index_of_first_non_null_ to be correct.
          b &= ~static_cast<size_type>(1);
          DestroyTree(tree);
          table_[b] = table_[b + 1] = NULL;
        }
      }
      DestroyNode(item);
      --num_elements_;
      if (GOOGLE_PREDICT_FALSE(b == index_of_first_non_null_)) {
        while (index_of_first_non_null_ < num_buckets_ &&
               table_[index_of_first_non_null_] == NULL) {
          ++index_of_first_non_null_;
        }
      }
    }

   private:
    std::pair<const_iterator, size_type> FindHelper(const Key& k) const {
      size_type b = BucketNumber(k);
      if (TableEntryIsNonEmptyList(b)) {
        Node* node = static_cast<Node*>(table_[b]);
        do {
          if (IsMatch(*KeyPtrFromNodePtr(node), k)) {
            return std::make_pair(const_iterator(node, this, b), b);
          } else {
            node = node->next;
          }
        } while (node != NULL);
      } else if (TableEntryIsTree(b)) {
        GOOGLE_DCHECK_EQ(table_[b], table_[b ^ 1]);
        b &= ~static_cast<size_t>(1);
        Tree* tree = static_cast<Tree*>(table_[b]);
        Key* key = const_cast<Key*>(&k);
        typename Tree::iterator tree_it = tree->find(key);
        if (tree_it != tree->end()) {
          return std::make_pair(const_iterator(tree_it, this, b), b);
        }
      }
      return std::make_pair(end(), b);
    }

    // Insert the given Node in bucket b.  If that would make bucket b too big,
    // and bucket b is not a tree, create a tree for buckets b and b^1 to share.
    // Requires count(*KeyPtrFromNodePtr(node)) == 0 and that b is the correct
    // bucket.  num_elements_ is not modified.
    iterator InsertUnique(size_type b, Node* node) {
      GOOGLE_DCHECK(index_of_first_non_null_ == num_buckets_ ||
             table_[index_of_first_non_null_] != NULL);
      // In practice, the code that led to this point may have already
      // determined whether we are inserting into an empty list, a short list,
      // or whatever.  But it's probably cheap enough to recompute that here;
      // it's likely that we're inserting into an empty or short list.
      iterator result;
      GOOGLE_DCHECK(find(*KeyPtrFromNodePtr(node)) == end());
      if (TableEntryIsEmpty(b)) {
        result = InsertUniqueInList(b, node);
      } else if (TableEntryIsNonEmptyList(b)) {
        if (GOOGLE_PREDICT_FALSE(TableEntryIsTooLong(b))) {
          TreeConvert(b);
          result = InsertUniqueInTree(b, node);
          GOOGLE_DCHECK_EQ(result.bucket_index_, b & ~static_cast<size_type>(1));
        } else {
          // Insert into a pre-existing list.  This case cannot modify
          // index_of_first_non_null_, so we skip the code to update it.
          return InsertUniqueInList(b, node);
        }
      } else {
        // Insert into a pre-existing tree.  This case cannot modify
        // index_of_first_non_null_, so we skip the code to update it.
        return InsertUniqueInTree(b, node);
      }
      index_of_first_non_null_ =
          std::min(index_of_first_non_null_, result.bucket_index_);
      return result;
    }

    // Helper for InsertUnique.  Handles the case where bucket b is a
    // not-too-long linked list.
    iterator InsertUniqueInList(size_type b, Node* node) {
      node->next = static_cast<Node*>(table_[b]);
      table_[b] = static_cast<void*>(node);
      return iterator(node, this, b);
    }

    // Helper for InsertUnique.  Handles the case where bucket b points to a
    // Tree.
    iterator InsertUniqueInTree(size_type b, Node* node) {
      GOOGLE_DCHECK_EQ(table_[b], table_[b ^ 1]);
      // Maintain the invariant that node->next is NULL for all Nodes in Trees.
      node->next = NULL;
      return iterator(static_cast<Tree*>(table_[b])
                      ->insert(KeyPtrFromNodePtr(node))
                      .first,
                      this, b & ~static_cast<size_t>(1));
    }

    // Returns whether it did resize.  Currently this is only used when
    // num_elements_ increases, though it could be used in other situations.
    // It checks for load too low as well as load too high: because any number
    // of erases can occur between inserts, the load could be as low as 0 here.
    // Resizing to a lower size is not always helpful, but failing to do so can
    // destroy the expected big-O bounds for some operations. By having the
    // policy that sometimes we resize down as well as up, clients can easily
    // keep O(size()) = O(number of buckets) if they want that.
    bool ResizeIfLoadIsOutOfRange(size_type new_size) {
      const size_type kMaxMapLoadTimes16 = 12;  // controls RAM vs CPU tradeoff
      const size_type hi_cutoff = num_buckets_ * kMaxMapLoadTimes16 / 16;
      const size_type lo_cutoff = hi_cutoff / 4;
      // We don't care how many elements are in trees.  If a lot are,
      // we may resize even though there are many empty buckets.  In
      // practice, this seems fine.
      if (GOOGLE_PREDICT_FALSE(new_size >= hi_cutoff)) {
        if (num_buckets_ <= max_size() / 2) {
          Resize(num_buckets_ * 2);
          return true;
        }
      } else if (GOOGLE_PREDICT_FALSE(new_size <= lo_cutoff &&
                               num_buckets_ > kMinTableSize)) {
        size_type lg2_of_size_reduction_factor = 1;
        // It's possible we want to shrink a lot here... size() could even be 0.
        // So, estimate how much to shrink by making sure we don't shrink so
        // much that we would need to grow the table after a few inserts.
        const size_type hypothetical_size = new_size * 5 / 4 + 1;
        while ((hypothetical_size << lg2_of_size_reduction_factor) <
               hi_cutoff) {
          ++lg2_of_size_reduction_factor;
        }
        size_type new_num_buckets = std::max<size_type>(
            kMinTableSize, num_buckets_ >> lg2_of_size_reduction_factor);
        if (new_num_buckets != num_buckets_) {
          Resize(new_num_buckets);
          return true;
        }
      }
      return false;
    }
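
    // Worked example (illustrative): with num_buckets_ == 16, hi_cutoff is
    // 16 * 12 / 16 == 12 and lo_cutoff is 3, so the table doubles when an
    // insert would bring the size to 12 or more, and a drop to 3 or fewer
    // elements triggers a shrink toward (but never below) kMinTableSize.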
1145 
1146     // Resize to the given number of buckets.
Resize(size_t new_num_buckets)1147     void Resize(size_t new_num_buckets) {
1148       GOOGLE_DCHECK_GE(new_num_buckets, kMinTableSize);
1149       void** const old_table = table_;
1150       const size_type old_table_size = num_buckets_;
1151       num_buckets_ = new_num_buckets;
1152       table_ = CreateEmptyTable(num_buckets_);
1153       const size_type start = index_of_first_non_null_;
1154       index_of_first_non_null_ = num_buckets_;
1155       for (size_type i = start; i < old_table_size; i++) {
1156         if (TableEntryIsNonEmptyList(old_table, i)) {
1157           TransferList(old_table, i);
1158         } else if (TableEntryIsTree(old_table, i)) {
1159           TransferTree(old_table, i++);
1160         }
1161       }
1162       Dealloc<void*>(old_table, old_table_size);
1163     }
1164 
TransferList(void * const * table,size_type index)1165     void TransferList(void* const* table, size_type index) {
1166       Node* node = static_cast<Node*>(table[index]);
1167       do {
1168         Node* next = node->next;
1169         InsertUnique(BucketNumber(*KeyPtrFromNodePtr(node)), node);
1170         node = next;
1171       } while (node != NULL);
1172     }
1173 
TransferTree(void * const * table,size_type index)1174     void TransferTree(void* const* table, size_type index) {
1175       Tree* tree = static_cast<Tree*>(table[index]);
1176       typename Tree::iterator tree_it = tree->begin();
1177       do {
1178         Node* node = NodePtrFromKeyPtr(*tree_it);
1179         InsertUnique(BucketNumber(**tree_it), node);
1180       } while (++tree_it != tree->end());
1181       DestroyTree(tree);
1182     }
1183 
EraseFromLinkedList(Node * item,Node * head)1184     Node* EraseFromLinkedList(Node* item, Node* head) {
1185       if (head == item) {
1186         return head->next;
1187       } else {
1188         head->next = EraseFromLinkedList(item, head->next);
1189         return head;
1190       }
1191     }
1192 
TableEntryIsEmpty(size_type b)1193     bool TableEntryIsEmpty(size_type b) const {
1194       return TableEntryIsEmpty(table_, b);
1195     }
TableEntryIsNonEmptyList(size_type b)1196     bool TableEntryIsNonEmptyList(size_type b) const {
1197       return TableEntryIsNonEmptyList(table_, b);
1198     }
TableEntryIsTree(size_type b)1199     bool TableEntryIsTree(size_type b) const {
1200       return TableEntryIsTree(table_, b);
1201     }
TableEntryIsList(size_type b)1202     bool TableEntryIsList(size_type b) const {
1203       return TableEntryIsList(table_, b);
1204     }
TableEntryIsEmpty(void * const * table,size_type b)1205     static bool TableEntryIsEmpty(void* const* table, size_type b) {
1206       return table[b] == NULL;
1207     }
TableEntryIsNonEmptyList(void * const * table,size_type b)1208     static bool TableEntryIsNonEmptyList(void* const* table, size_type b) {
1209       return table[b] != NULL && table[b] != table[b ^ 1];
1210     }
TableEntryIsTree(void * const * table,size_type b)1211     static bool TableEntryIsTree(void* const* table, size_type b) {
1212       return !TableEntryIsEmpty(table, b) &&
1213           !TableEntryIsNonEmptyList(table, b);
1214     }
TableEntryIsList(void * const * table,size_type b)1215     static bool TableEntryIsList(void* const* table, size_type b) {
1216       return !TableEntryIsTree(table, b);
1217     }
1218 
TreeConvert(size_type b)1219     void TreeConvert(size_type b) {
1220       GOOGLE_DCHECK(!TableEntryIsTree(b) && !TableEntryIsTree(b ^ 1));
1221       typename Allocator::template rebind<Tree>::other tree_allocator(alloc_);
1222       Tree* tree = tree_allocator.allocate(1);
1223       // We want to use the three-arg form of construct, if it exists, but we
1224       // create a temporary and use the two-arg construct that's known to exist.
1225       // It's clunky, but the compiler should be able to generate more-or-less
1226       // the same code.
1227       tree_allocator.construct(tree,
1228                                Tree(KeyCompare(), KeyPtrAllocator(alloc_)));
1229       // Now the tree is ready to use.
1230       size_type count = CopyListToTree(b, tree) + CopyListToTree(b ^ 1, tree);
1231       GOOGLE_DCHECK_EQ(count, tree->size());
1232       table_[b] = table_[b ^ 1] = static_cast<void*>(tree);
1233     }
1234 
1235     // Copy a linked list in the given bucket to a tree.
1236     // Returns the number of things it copied.
CopyListToTree(size_type b,Tree * tree)1237     size_type CopyListToTree(size_type b, Tree* tree) {
1238       size_type count = 0;
1239       Node* node = static_cast<Node*>(table_[b]);
1240       while (node != NULL) {
1241         tree->insert(KeyPtrFromNodePtr(node));
1242         ++count;
1243         Node* next = node->next;
1244         node->next = NULL;
1245         node = next;
1246       }
1247       return count;
1248     }
1249 
1250     // Return whether table_[b] is a linked list that seems awfully long.
1251     // Requires table_[b] to point to a non-empty linked list.
TableEntryIsTooLong(size_type b)1252     bool TableEntryIsTooLong(size_type b) {
1253       const size_type kMaxLength = 8;
1254       size_type count = 0;
1255       Node* node = static_cast<Node*>(table_[b]);
1256       do {
1257         ++count;
1258         node = node->next;
1259       } while (node != NULL);
1260       // Invariant: no linked list ever is more than kMaxLength in length.
1261       GOOGLE_DCHECK_LE(count, kMaxLength);
1262       return count >= kMaxLength;
1263     }
1264 
BucketNumber(const Key & k)1265     size_type BucketNumber(const Key& k) const {
1266       // We inherit from hasher, so one-arg operator() provides a hash function.
1267       size_type h = (*const_cast<InnerMap*>(this))(k);
1268       // To help prevent people from making assumptions about the hash function,
1269       // we use the seed differently depending on NDEBUG.  The default hash
1270       // function, the seeding, etc., are all likely to change in the future.
1271 #ifndef NDEBUG
1272       return (h * (seed_ | 1)) & (num_buckets_ - 1);
1273 #else
1274       return (h + seed_) & (num_buckets_ - 1);
1275 #endif
1276     }
1277 
IsMatch(const Key & k0,const Key & k1)1278     bool IsMatch(const Key& k0, const Key& k1) const {
1279       return std::equal_to<Key>()(k0, k1);
1280     }
1281 
1282     // Return a power of two no less than max(kMinTableSize, n).
1283     // Assumes either n < kMinTableSize or n is a power of two.
TableSize(size_type n)1284     size_type TableSize(size_type n) {
1285       return n < kMinTableSize ? kMinTableSize : n;
1286     }
1287 
1288     // Use alloc_ to allocate an array of n objects of type U.
1289     template <typename U>
Alloc(size_type n)1290     U* Alloc(size_type n) {
1291       typedef typename Allocator::template rebind<U>::other alloc_type;
1292       return alloc_type(alloc_).allocate(n);
1293     }
1294 
1295     // Use alloc_ to deallocate an array of n objects of type U.
1296     template <typename U>
Dealloc(U * t,size_type n)1297     void Dealloc(U* t, size_type n) {
1298       typedef typename Allocator::template rebind<U>::other alloc_type;
1299       alloc_type(alloc_).deallocate(t, n);
1300     }
1301 
DestroyNode(Node * node)1302     void DestroyNode(Node* node) {
1303       alloc_.destroy(&node->kv);
1304       Dealloc<Node>(node, 1);
1305     }
1306 
DestroyTree(Tree * tree)1307     void DestroyTree(Tree* tree) {
1308       typename Allocator::template rebind<Tree>::other tree_allocator(alloc_);
1309       tree_allocator.destroy(tree);
1310       tree_allocator.deallocate(tree, 1);
1311     }
1312 
CreateEmptyTable(size_type n)1313     void** CreateEmptyTable(size_type n) {
1314       GOOGLE_DCHECK(n >= kMinTableSize);
1315       GOOGLE_DCHECK_EQ(n & (n - 1), 0);
1316       void** result = Alloc<void*>(n);
1317       memset(result, 0, n * sizeof(result[0]));
1318       return result;
1319     }
1320 
1321     // Return a randomish value.
Seed()1322     size_type Seed() const {
1323       // random_device can throw, so avoid it unless we are compiling with
1324       // exceptions enabled.
1325 #if __cpp_exceptions && LANG_CXX11
1326       try {
1327         std::random_device rd;
1328         std::knuth_b knuth(rd());
1329         std::uniform_int_distribution<size_type> u;
1330         return u(knuth);
1331       } catch (...) { }
1332 #endif
1333       size_type s = static_cast<size_type>(reinterpret_cast<uintptr_t>(this));
1334 #if defined(__x86_64__) && defined(__GNUC__)
1335       uint32 hi, lo;
1336       asm("rdtsc" : "=a" (lo), "=d" (hi));
1337       s += ((static_cast<uint64>(hi) << 32) | lo);
1338 #endif
1339       return s;
1340     }

    size_type num_elements_;
    size_type num_buckets_;
    size_type seed_;
    size_type index_of_first_non_null_;
    void** table_;  // an array with num_buckets_ entries
    Allocator alloc_;
    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(InnerMap);
  };  // end of class InnerMap

  typedef hash_map<Key, value_type*, hash<Key>, equal_to<Key>,
                   MapAllocator<std::pair<const Key, MapPair<Key, T>*> > >
      DeprecatedInnerMap;

 public:
  // Iterators
  class iterator_base {
   public:
    // We support "old style" and "new style" iterators for now. This is
    // temporary.  Also, for "iterator()" we have an unknown category.
    // TODO(gpike): get rid of this.
    enum IteratorStyle { kUnknown, kOld, kNew };
    explicit iterator_base(IteratorStyle style) : iterator_style_(style) {}

    bool OldStyle() const {
      GOOGLE_DCHECK_NE(iterator_style_, kUnknown);
      return iterator_style_ == kOld;
    }
    bool UnknownStyle() const {
      return iterator_style_ == kUnknown;
    }
    bool SameStyle(const iterator_base& other) const {
      return iterator_style_ == other.iterator_style_;
    }

   private:
    IteratorStyle iterator_style_;
  };

  class const_iterator
      : private iterator_base,
        public std::iterator<std::forward_iterator_tag, value_type, ptrdiff_t,
                             const value_type*, const value_type&> {
    typedef typename InnerMap::const_iterator InnerIt;
    typedef typename DeprecatedInnerMap::const_iterator DeprecatedInnerIt;

   public:
    const_iterator() : iterator_base(iterator_base::kUnknown) {}
    explicit const_iterator(const DeprecatedInnerIt& dit)
        : iterator_base(iterator_base::kOld), dit_(dit) {}
    explicit const_iterator(const InnerIt& it)
        : iterator_base(iterator_base::kNew), it_(it) {}

    const_iterator(const const_iterator& other)
        : iterator_base(other), it_(other.it_), dit_(other.dit_) {}

    const_reference operator*() const {
      return this->OldStyle() ? *dit_->second : *it_->value();
    }
    const_pointer operator->() const { return &(operator*()); }

    const_iterator& operator++() {
      if (this->OldStyle())
        ++dit_;
      else
        ++it_;
      return *this;
    }
    const_iterator operator++(int) {
      return this->OldStyle() ? const_iterator(dit_++) : const_iterator(it_++);
    }

    friend bool operator==(const const_iterator& a, const const_iterator& b) {
      if (!a.SameStyle(b)) return false;
      if (a.UnknownStyle()) return true;
      return a.OldStyle() ? (a.dit_ == b.dit_) : (a.it_ == b.it_);
    }
    friend bool operator!=(const const_iterator& a, const const_iterator& b) {
      return !(a == b);
    }

   private:
    InnerIt it_;
    DeprecatedInnerIt dit_;
  };

  class iterator : private iterator_base,
                   public std::iterator<std::forward_iterator_tag, value_type> {
    typedef typename InnerMap::iterator InnerIt;
    typedef typename DeprecatedInnerMap::iterator DeprecatedInnerIt;

   public:
    iterator() : iterator_base(iterator_base::kUnknown) {}
    explicit iterator(const DeprecatedInnerIt& dit)
        : iterator_base(iterator_base::kOld), dit_(dit) {}
    explicit iterator(const InnerIt& it)
        : iterator_base(iterator_base::kNew), it_(it) {}

    reference operator*() const {
      return this->OldStyle() ? *dit_->second : *it_->value();
    }
    pointer operator->() const { return &(operator*()); }

    iterator& operator++() {
      if (this->OldStyle())
        ++dit_;
      else
        ++it_;
      return *this;
    }
    iterator operator++(int) {
      return this->OldStyle() ? iterator(dit_++) : iterator(it_++);
    }

    // Allow implicit conversion to const_iterator.
    operator const_iterator() const {
      return this->OldStyle() ?
          const_iterator(typename DeprecatedInnerMap::const_iterator(dit_)) :
          const_iterator(typename InnerMap::const_iterator(it_));
    }

    friend bool operator==(const iterator& a, const iterator& b) {
      if (!a.SameStyle(b)) return false;
      if (a.UnknownStyle()) return true;
      return a.OldStyle() ? a.dit_ == b.dit_ : a.it_ == b.it_;
    }
    friend bool operator!=(const iterator& a, const iterator& b) {
      return !(a == b);
    }

   private:
    friend class Map;

    InnerIt it_;
    DeprecatedInnerIt dit_;
  };

  iterator begin() {
    return old_style_ ? iterator(deprecated_elements_->begin())
                      : iterator(elements_->begin());
  }
  iterator end() {
    return old_style_ ? iterator(deprecated_elements_->end())
                      : iterator(elements_->end());
  }
  const_iterator begin() const {
    return old_style_ ? const_iterator(deprecated_elements_->begin())
                      : const_iterator(iterator(elements_->begin()));
  }
  const_iterator end() const {
    return old_style_ ? const_iterator(deprecated_elements_->end())
                      : const_iterator(iterator(elements_->end()));
  }
  const_iterator cbegin() const { return begin(); }
  const_iterator cend() const { return end(); }
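
  // Reader note (not part of the original header): a minimal, hypothetical
  // iteration sketch.  Iteration order is not specified and can differ from
  // run to run because of the per-instance seed.
  //
  //   Map<string, int32> counts;   // hypothetical instantiation
  //   counts["a"] = 1;
  //   counts["b"] = 2;
  //   for (Map<string, int32>::const_iterator it = counts.begin();
  //        it != counts.end(); ++it) {
  //     printf("%s -> %d\n", it->first.c_str(), it->second);
  //   }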

  // Capacity
  size_type size() const {
    return old_style_ ? deprecated_elements_->size() : elements_->size();
  }
  bool empty() const { return size() == 0; }

  // Element access
  T& operator[](const key_type& key) {
    value_type** value =
        old_style_ ? &(*deprecated_elements_)[key] : &(*elements_)[key];
    if (*value == NULL) {
      *value = CreateValueTypeInternal(key);
      internal::MapValueInitializer<google::protobuf::is_proto_enum<T>::value,
                                    T>::Initialize((*value)->second,
                                                   default_enum_value_);
    }
    return (*value)->second;
  }
  const T& at(const key_type& key) const {
    const_iterator it = find(key);
    GOOGLE_CHECK(it != end());
    return it->second;
  }
  T& at(const key_type& key) {
    iterator it = find(key);
    GOOGLE_CHECK(it != end());
    return it->second;
  }
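
  // Reader note (not part of the original header): operator[] inserts a
  // default-initialized value when the key is absent, while at()
  // GOOGLE_CHECK-fails on a missing key, so a hypothetical lookup-only
  // pattern should go through find():
  //
  //   Map<string, int32>::const_iterator it = counts.find("missing");
  //   if (it == counts.end()) {
  //     // not present, and nothing was inserted
  //   }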

  // Lookup
  size_type count(const key_type& key) const {
    if (find(key) != end()) assert(key == find(key)->first);
    return find(key) == end() ? 0 : 1;
  }
  const_iterator find(const key_type& key) const {
    return old_style_ ? const_iterator(deprecated_elements_->find(key))
        : const_iterator(iterator(elements_->find(key)));
  }
  iterator find(const key_type& key) {
    return old_style_ ? iterator(deprecated_elements_->find(key))
                      : iterator(elements_->find(key));
  }
  std::pair<const_iterator, const_iterator> equal_range(
      const key_type& key) const {
    const_iterator it = find(key);
    if (it == end()) {
      return std::pair<const_iterator, const_iterator>(it, it);
    } else {
      const_iterator begin = it++;
      return std::pair<const_iterator, const_iterator>(begin, it);
    }
  }
  std::pair<iterator, iterator> equal_range(const key_type& key) {
    iterator it = find(key);
    if (it == end()) {
      return std::pair<iterator, iterator>(it, it);
    } else {
      iterator begin = it++;
      return std::pair<iterator, iterator>(begin, it);
    }
  }
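
  // Reader note (not part of the original header): keys are unique, so
  // equal_range() yields a range of at most one element -- (end(), end())
  // when the key is absent, or (it, ++it) around the single match.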

  // insert
  std::pair<iterator, bool> insert(const value_type& value) {
    if (old_style_) {
      iterator it = find(value.first);
      if (it != end()) {
        return std::pair<iterator, bool>(it, false);
      } else {
        return std::pair<iterator, bool>(
            iterator(deprecated_elements_->insert(std::pair<Key, value_type*>(
                value.first, CreateValueTypeInternal(value))).first), true);
      }
    } else {
      std::pair<typename InnerMap::iterator, bool> p =
          elements_->insert(value.first);
      if (p.second) {
        p.first->value() = CreateValueTypeInternal(value);
      }
      return std::pair<iterator, bool>(iterator(p.first), p.second);
    }
  }
  template <class InputIt>
  void insert(InputIt first, InputIt last) {
    for (InputIt it = first; it != last; ++it) {
      iterator exist_it = find(it->first);
      if (exist_it == end()) {
        operator[](it->first) = it->second;
      }
    }
  }
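
  // Reader note (not part of the original header): both insert() overloads
  // follow the usual associative-container convention of never overwriting
  // an existing mapping.  A hypothetical sketch:
  //
  //   std::pair<Map<string, int32>::iterator, bool> result =
  //       counts.insert(Map<string, int32>::value_type("a", 10));
  //   // result.second is false if "a" was already present, in which case
  //   // the stored value is left untouched.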

  // Erase and clear
  size_type erase(const key_type& key) {
    iterator it = find(key);
    if (it == end()) {
      return 0;
    } else {
      erase(it);
      return 1;
    }
  }
  iterator erase(iterator pos) {
    if (arena_ == NULL) delete pos.operator->();
    iterator i = pos++;
    if (old_style_)
      deprecated_elements_->erase(i.dit_);
    else
      elements_->erase(i.it_);
    return pos;
  }
  void erase(iterator first, iterator last) {
    while (first != last) {
      first = erase(first);
    }
  }
  void clear() { erase(begin(), end()); }
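
  // Reader note (not part of the original header): erase(key) reports how
  // many elements were removed (0 or 1) and erase(iterator) returns an
  // iterator past the erased element, so a hypothetical filtering loop is:
  //
  //   for (Map<string, int32>::iterator it = counts.begin();
  //        it != counts.end(); ) {
  //     if (it->second == 0) {
  //       it = counts.erase(it);
  //     } else {
  //       ++it;
  //     }
  //   }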

  // Assign
  Map& operator=(const Map& other) {
    if (this != &other) {
      clear();
      insert(other.begin(), other.end());
    }
    return *this;
  }

  void swap(Map& other) {
    if (arena_ == other.arena_ && old_style_ == other.old_style_) {
      std::swap(default_enum_value_, other.default_enum_value_);
      if (old_style_) {
        std::swap(deprecated_elements_, other.deprecated_elements_);
      } else {
        std::swap(elements_, other.elements_);
      }
    } else {
      // TODO(zuguang): optimize this. The temporary copy can be allocated
      // in the same arena as the other message, and the "other = copy" can
      // be replaced with the fast-path swap above.
      Map copy = *this;
      *this = other;
      other = copy;
    }
  }
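
  // Reader note (not part of the original header): the pointer swap above
  // only applies when both maps share an arena and the same internal style;
  // otherwise swap() falls back to copying through a temporary, as the TODO
  // remarks.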

  // Access to hasher.  Currently this returns a copy, but it may
  // be modified to return a const reference in the future.
  hasher hash_function() const {
    return old_style_ ? deprecated_elements_->hash_function()
                      : elements_->hash_function();
  }

 private:
  // Set default enum value only for proto2 map field whose value is enum type.
  void SetDefaultEnumValue(int default_enum_value) {
    default_enum_value_ = default_enum_value;
  }

  value_type* CreateValueTypeInternal(const Key& key) {
    if (arena_ == NULL) {
      return new value_type(key);
    } else {
      value_type* value = reinterpret_cast<value_type*>(
          Arena::CreateArray<uint8>(arena_, sizeof(value_type)));
      Arena::CreateInArenaStorage(const_cast<Key*>(&value->first), arena_);
      Arena::CreateInArenaStorage(&value->second, arena_);
      const_cast<Key&>(value->first) = key;
      return value;
    }
  }

  value_type* CreateValueTypeInternal(const value_type& value) {
    if (arena_ == NULL) {
      return new value_type(value);
    } else {
      value_type* p = reinterpret_cast<value_type*>(
          Arena::CreateArray<uint8>(arena_, sizeof(value_type)));
      Arena::CreateInArenaStorage(const_cast<Key*>(&p->first), arena_);
      Arena::CreateInArenaStorage(&p->second, arena_);
      const_cast<Key&>(p->first) = value.first;
      p->second = value.second;
      return p;
    }
  }
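
  // Reader note (not part of the original header): on an arena, both
  // overloads above carve raw bytes out of the arena and construct the key
  // and value in place with Arena::CreateInArenaStorage, so the resulting
  // value_type is owned by the arena and is never deleted individually
  // (hence the arena_ == NULL check in erase(iterator)).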

  Arena* arena_;
  int default_enum_value_;
  // The following is a tagged union because we support two map styles
  // for now.
  // TODO(gpike): get rid of the old style.
  const bool old_style_;
  union {
    InnerMap* elements_;
    DeprecatedInnerMap* deprecated_elements_;
  };

  friend class ::google::protobuf::Arena;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  template <typename K, typename V,
            internal::WireFormatLite::FieldType key_wire_type,
            internal::WireFormatLite::FieldType value_wire_type,
            int default_enum_value>
  friend class internal::MapFieldLite;
};

}  // namespace protobuf
}  // namespace google

GOOGLE_PROTOBUF_HASH_NAMESPACE_DECLARATION_START
template<>
struct hash<google::protobuf::MapKey> {
  size_t
  operator()(const google::protobuf::MapKey& map_key) const {
    switch (map_key.type()) {
      case google::protobuf::FieldDescriptor::CPPTYPE_DOUBLE:
      case google::protobuf::FieldDescriptor::CPPTYPE_FLOAT:
      case google::protobuf::FieldDescriptor::CPPTYPE_ENUM:
      case google::protobuf::FieldDescriptor::CPPTYPE_MESSAGE:
        GOOGLE_LOG(FATAL) << "Unsupported";
        break;
      case google::protobuf::FieldDescriptor::CPPTYPE_STRING:
        return hash<string>()(map_key.GetStringValue());
      case google::protobuf::FieldDescriptor::CPPTYPE_INT64:
        return hash< ::google::protobuf::int64>()(map_key.GetInt64Value());
      case google::protobuf::FieldDescriptor::CPPTYPE_INT32:
        return hash< ::google::protobuf::int32>()(map_key.GetInt32Value());
      case google::protobuf::FieldDescriptor::CPPTYPE_UINT64:
        return hash< ::google::protobuf::uint64>()(map_key.GetUInt64Value());
      case google::protobuf::FieldDescriptor::CPPTYPE_UINT32:
        return hash< ::google::protobuf::uint32>()(map_key.GetUInt32Value());
      case google::protobuf::FieldDescriptor::CPPTYPE_BOOL:
        return hash<bool>()(map_key.GetBoolValue());
    }
    GOOGLE_LOG(FATAL) << "Can't get here.";
    return 0;
  }
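  // Reader note (not part of the original header): this second operator()
  // is a less-than comparison, presumably so the functor can also serve as
  // the ordering required by hash_map implementations built on a
  // hash_compare-style traits class (e.g. MSVC's).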
  bool
  operator()(const google::protobuf::MapKey& map_key1,
             const google::protobuf::MapKey& map_key2) const {
    return map_key1 < map_key2;
  }
};
GOOGLE_PROTOBUF_HASH_NAMESPACE_DECLARATION_END

#endif  // GOOGLE_PROTOBUF_MAP_H__