1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc. All rights reserved.
3 //
4 // Use of this source code is governed by a BSD-style
5 // license that can be found in the LICENSE file or at
6 // https://developers.google.com/open-source/licenses/bsd
7
8 #include "google/protobuf/map_field.h"
9
10 #include <atomic>
11 #include <string>
12 #include <utility>
13 #include <vector>
14
15 #include "absl/log/absl_check.h"
16 #include "google/protobuf/map.h"
17 #include "google/protobuf/map_field_inl.h"
18 #include "google/protobuf/port.h"
19 #include "google/protobuf/raw_ptr.h"
20
21 // Must be included last.
22 #include "google/protobuf/port_def.inc"
23
24 namespace google {
25 namespace protobuf {
26 namespace internal {
27
operator ()(const MapKey & value) const28 VariantKey RealKeyToVariantKey<MapKey>::operator()(const MapKey& value) const {
29 switch (value.type()) {
30 case FieldDescriptor::CPPTYPE_STRING:
31 return VariantKey(value.GetStringValue());
32 case FieldDescriptor::CPPTYPE_INT64:
33 return VariantKey(value.GetInt64Value());
34 case FieldDescriptor::CPPTYPE_INT32:
35 return VariantKey(value.GetInt32Value());
36 case FieldDescriptor::CPPTYPE_UINT64:
37 return VariantKey(value.GetUInt64Value());
38 case FieldDescriptor::CPPTYPE_UINT32:
39 return VariantKey(value.GetUInt32Value());
40 case FieldDescriptor::CPPTYPE_BOOL:
41 return VariantKey(static_cast<uint64_t>(value.GetBoolValue()));
42 default:
43 Unreachable();
44 return VariantKey(uint64_t{});
45 }
46 }
47
// Heap-destruction path only: arena-owned instances must not reach this
// destructor (their memory is reclaimed by the arena), hence the DCHECK.
// The lazily-created ReflectionPayload, if any, is heap-owned here.
MapFieldBase::~MapFieldBase() {
  ABSL_DCHECK_EQ(arena(), nullptr);
  delete maybe_payload();
}
52
// Shared accessor used by the parse path: brings the map representation up to
// date with the repeated field, then (for mutating callers) marks the map
// dirty so the repeated field will be regenerated on its next access.
const UntypedMapBase& MapFieldBase::GetMapImpl(const MapFieldBaseForParse& map,
                                               bool is_mutable) {
  const auto& self = static_cast<const MapFieldBase&>(map);
  self.SyncMapWithRepeatedField();
  // const_cast is safe: is_mutable means the caller holds mutable access.
  if (is_mutable) const_cast<MapFieldBase&>(self).SetMapDirty();
  return self.GetMapRaw();
}
60
// Positions `map_iter` at the first entry of the (synced) map and populates
// its cached key/value.
void MapFieldBase::MapBegin(MapIterator* map_iter) const {
  map_iter->iter_ = GetMap().begin();
  SetMapIteratorValue(map_iter);
}
65
// Positions `map_iter` at the past-the-end sentinel. No key/value is cached
// for the end iterator.
void MapFieldBase::MapEnd(MapIterator* map_iter) const {
  map_iter->iter_ = UntypedMapBase::EndIterator();
}
69
// Two MapIterators are equal iff their underlying untyped iterators are.
bool MapFieldBase::EqualIterator(const MapIterator& a,
                                 const MapIterator& b) const {
  return a.iter_.Equals(b.iter_);
}
74
// Advances `map_iter` by one entry and refreshes its cached key/value.
void MapFieldBase::IncreaseIterator(MapIterator* map_iter) const {
  map_iter->iter_.PlusPlus();
  SetMapIteratorValue(map_iter);
}
79
// Copies iterator position and cached key/value types from `that_iter` into
// `this_iter`, then refreshes the cached value for the new position.
void MapFieldBase::CopyIterator(MapIterator* this_iter,
                                const MapIterator& that_iter) const {
  this_iter->iter_ = that_iter.iter_;
  this_iter->key_.SetType(that_iter.key_.type());
  // MapValueRef::type() fails when containing data is null. However, if
  // this_iter points to MapEnd, data can be null, so read type_ directly.
  this_iter->value_.SetType(
      static_cast<FieldDescriptor::CppType>(that_iter.value_.type_));
  SetMapIteratorValue(this_iter);
}
90
// Read-only repeated-field view of the map; regenerates it from the map first
// if the map was modified (for_mutation=false: a default instance with an
// empty map is served without materializing a payload).
const RepeatedPtrFieldBase& MapFieldBase::GetRepeatedField() const {
  ConstAccess();
  return SyncRepeatedFieldWithMap(false);
}
95
// Mutable repeated-field view: sync from the map, then mark the repeated
// field dirty so the map is rebuilt from it on the next map access.
RepeatedPtrFieldBase* MapFieldBase::MutableRepeatedField() {
  MutableAccess();
  auto& res = SyncRepeatedFieldWithMap(true);
  SetRepeatedDirty();
  // Safe: this object is mutable here; the const comes from the shared sync
  // helper's signature.
  return const_cast<RepeatedPtrFieldBase*>(&res);
}
102
// Exchanges the values of two atomics using relaxed loads/stores; any
// required ordering is the caller's responsibility.
template <typename T>
static void SwapRelaxed(std::atomic<T>& a, std::atomic<T>& b) {
  const T old_a = a.load(std::memory_order_relaxed);
  a.store(b.load(std::memory_order_relaxed), std::memory_order_relaxed);
  b.store(old_a, std::memory_order_relaxed);
}
110
// Slow path of payload(): lazily creates the ReflectionPayload on first use.
// payload_ is a tagged pointer holding either the arena (no payload yet) or
// the payload itself; concurrent first users race with compare_exchange so
// exactly one instance wins and the rest are discarded.
MapFieldBase::ReflectionPayload& MapFieldBase::PayloadSlow() const {
  auto p = payload_.load(std::memory_order_acquire);
  if (!IsPayload(p)) {
    auto* arena = ToArena(p);
    auto* payload = Arena::Create<ReflectionPayload>(arena, arena);
    auto new_p = ToTaggedPtr(payload);
    if (payload_.compare_exchange_strong(p, new_p, std::memory_order_acq_rel)) {
      // We were able to store it.
      p = new_p;
    } else {
      // Someone beat us to it. Throw away the one we made. `p` already contains
      // the one we want. (Arena-allocated losers are simply abandoned; the
      // arena reclaims them.)
      if (arena == nullptr) delete payload;
    }
  }
  return *ToPayload(p);
}
128
// Swaps two map fields. Same-arena instances can just exchange internal
// pointers; cross-arena swaps must swap payload contents (repeated field and
// dirty state) element-wise, materializing a payload on whichever side lacks
// one (unless neither side has any payload state to exchange).
void MapFieldBase::SwapImpl(MapFieldBase& lhs, MapFieldBase& rhs) {
  if (lhs.arena() == rhs.arena()) {
    lhs.InternalSwap(&rhs);
    return;
  }
  auto* p1 = lhs.maybe_payload();
  auto* p2 = rhs.maybe_payload();
  if (p1 == nullptr && p2 == nullptr) return;

  if (p1 == nullptr) p1 = &lhs.payload();
  if (p2 == nullptr) p2 = &rhs.payload();
  p1->repeated_field.Swap(&p2->repeated_field);
  SwapRelaxed(p1->state, p2->state);
}
143
// Pointer-level swap; only valid when both fields live on the same arena
// (enforced by the DCHECK), since ownership tags are exchanged wholesale.
void MapFieldBase::UnsafeShallowSwapImpl(MapFieldBase& lhs, MapFieldBase& rhs) {
  ABSL_DCHECK_EQ(lhs.arena(), rhs.arena());
  lhs.InternalSwap(&rhs);
}
148
// Exchanges the tagged payload_ pointers. Callers guarantee both objects
// share an arena (see SwapImpl / UnsafeShallowSwapImpl), so swapping the tag
// — which may encode the arena — is safe.
void MapFieldBase::InternalSwap(MapFieldBase* other) {
  SwapRelaxed(payload_, other->payload_);
}
152
// Heap usage of this field excluding sizeof(*this). Only meaningful work is
// done when a reflection payload exists; the measurement runs under the
// payload mutex to get a consistent view.
size_t MapFieldBase::SpaceUsedExcludingSelfLong() const {
  ConstAccess();
  size_t size = 0;
  if (auto* p = maybe_payload()) {
    {
      absl::MutexLock lock(&p->mutex);
      size = SpaceUsedExcludingSelfNoLock();
    }
    // NOTE(review): ConstAccess() re-asserted after the lock is released —
    // presumably debug-only access bookkeeping; confirm against its impl.
    ConstAccess();
  }
  return size;
}
165
// True unless the repeated field holds newer data than the map.
bool MapFieldBase::IsMapValid() const {
  ConstAccess();
  // "Acquire" insures the operation after SyncRepeatedFieldWithMap won't get
  // executed before state_ is checked.
  return state() != STATE_MODIFIED_REPEATED;
}
172
// True unless the map holds newer data than the repeated field.
bool MapFieldBase::IsRepeatedFieldValid() const {
  ConstAccess();
  return state() != STATE_MODIFIED_MAP;
}
177
// Records that the map representation changed, invalidating the repeated
// field until the next sync.
void MapFieldBase::SetMapDirty() {
  MutableAccess();
  // These are called by (non-const) mutator functions. So by our API it's the
  // callers responsibility to have these calls properly ordered.
  payload().state.store(STATE_MODIFIED_MAP, std::memory_order_relaxed);
}
184
// Records that the repeated-field representation changed, invalidating the
// map until the next sync.
void MapFieldBase::SetRepeatedDirty() {
  MutableAccess();
  // These are called by (non-const) mutator functions. So by our API it's the
  // callers responsibility to have these calls properly ordered.
  payload().state.store(STATE_MODIFIED_REPEATED, std::memory_order_relaxed);
}
191
// Ensures the repeated field reflects the map's current contents, using
// double-checked locking on the payload mutex so concurrent readers perform
// the regeneration at most once.
const RepeatedPtrFieldBase& MapFieldBase::SyncRepeatedFieldWithMap(
    bool for_mutation) const {
  ConstAccess();
  if (state() == STATE_MODIFIED_MAP) {
    auto* p = maybe_payload();
    if (p == nullptr) {
      // If we have no payload, and we do not want to mutate the object, and the
      // map is empty, then do nothing.
      // This prevents modifying global default instances which might be in ro
      // memory.
      if (!for_mutation && GetMapRaw().empty()) {
        // RawPtr yields a valid-but-empty dummy to return by reference.
        return *RawPtr<const RepeatedPtrFieldBase>();
      }
      p = &payload();
    }

    {
      absl::MutexLock lock(&p->mutex);
      // Double check state, because another thread may have seen the same
      // state and done the synchronization before the current thread.
      if (p->state.load(std::memory_order_relaxed) == STATE_MODIFIED_MAP) {
        const_cast<MapFieldBase*>(this)->SyncRepeatedFieldWithMapNoLock();
        // Release pairs with the acquire in state() so readers that observe
        // CLEAN also observe the synced repeated field.
        p->state.store(CLEAN, std::memory_order_release);
      }
    }
    ConstAccess();
    return reinterpret_cast<const RepeatedPtrFieldBase&>(p->repeated_field);
  }
  return reinterpret_cast<const RepeatedPtrFieldBase&>(
      payload().repeated_field);
}
223
// Rebuilds the repeated field from scratch out of the map's entries, using
// reflection on the map-entry prototype to set each key and value field.
// Caller must hold the payload mutex (hence "NoLock").
void MapFieldBase::SyncRepeatedFieldWithMapNoLock() {
  const Message* prototype = GetPrototype();
  const Reflection* reflection = prototype->GetReflection();
  const Descriptor* descriptor = prototype->GetDescriptor();
  const FieldDescriptor* key_des = descriptor->map_key();
  const FieldDescriptor* val_des = descriptor->map_value();

  RepeatedPtrField<Message>& rep = payload().repeated_field;
  rep.Clear();

  // Iterate the raw map directly (bypassing GetMap(), which would re-sync).
  MapIterator it(this, descriptor);
  MapIterator end(this, descriptor);

  it.iter_ = GetMapRaw().begin();
  SetMapIteratorValue(&it);
  end.iter_ = UntypedMapBase::EndIterator();

  for (; !EqualIterator(it, end); IncreaseIterator(&it)) {
    // New entries are allocated on this field's arena and handed to `rep`.
    Message* new_entry = prototype->New(arena());
    rep.AddAllocated(new_entry);
    const MapKey& map_key = it.GetKey();
    switch (key_des->cpp_type()) {
      case FieldDescriptor::CPPTYPE_STRING:
        reflection->SetString(new_entry, key_des,
                              std::string(map_key.GetStringValue()));
        break;
      case FieldDescriptor::CPPTYPE_INT64:
        reflection->SetInt64(new_entry, key_des, map_key.GetInt64Value());
        break;
      case FieldDescriptor::CPPTYPE_INT32:
        reflection->SetInt32(new_entry, key_des, map_key.GetInt32Value());
        break;
      case FieldDescriptor::CPPTYPE_UINT64:
        reflection->SetUInt64(new_entry, key_des, map_key.GetUInt64Value());
        break;
      case FieldDescriptor::CPPTYPE_UINT32:
        reflection->SetUInt32(new_entry, key_des, map_key.GetUInt32Value());
        break;
      case FieldDescriptor::CPPTYPE_BOOL:
        reflection->SetBool(new_entry, key_des, map_key.GetBoolValue());
        break;
      default:
        // Float/double/enum/message cannot be map keys.
        Unreachable();
    }

    const MapValueRef& map_val = it.GetValueRef();
    switch (val_des->cpp_type()) {
      case FieldDescriptor::CPPTYPE_STRING:
        reflection->SetString(new_entry, val_des, map_val.GetStringValue());
        break;
      case FieldDescriptor::CPPTYPE_INT64:
        reflection->SetInt64(new_entry, val_des, map_val.GetInt64Value());
        break;
      case FieldDescriptor::CPPTYPE_INT32:
        reflection->SetInt32(new_entry, val_des, map_val.GetInt32Value());
        break;
      case FieldDescriptor::CPPTYPE_UINT64:
        reflection->SetUInt64(new_entry, val_des, map_val.GetUInt64Value());
        break;
      case FieldDescriptor::CPPTYPE_UINT32:
        reflection->SetUInt32(new_entry, val_des, map_val.GetUInt32Value());
        break;
      case FieldDescriptor::CPPTYPE_BOOL:
        reflection->SetBool(new_entry, val_des, map_val.GetBoolValue());
        break;
      case FieldDescriptor::CPPTYPE_DOUBLE:
        reflection->SetDouble(new_entry, val_des, map_val.GetDoubleValue());
        break;
      case FieldDescriptor::CPPTYPE_FLOAT:
        reflection->SetFloat(new_entry, val_des, map_val.GetFloatValue());
        break;
      case FieldDescriptor::CPPTYPE_ENUM:
        reflection->SetEnumValue(new_entry, val_des, map_val.GetEnumValue());
        break;
      case FieldDescriptor::CPPTYPE_MESSAGE: {
        const Message& message = map_val.GetMessageValue();
        reflection->MutableMessage(new_entry, val_des)->CopyFrom(message);
        break;
      }
    }
  }
}
306
// Ensures the map reflects the repeated field's current contents; mirror
// image of SyncRepeatedFieldWithMap, using the same double-checked locking.
void MapFieldBase::SyncMapWithRepeatedField() const {
  ConstAccess();
  // acquire here matches with release below to ensure that we can only see a
  // value of CLEAN after all previous changes have been synced.
  if (state() == STATE_MODIFIED_REPEATED) {
    auto& p = payload();
    {
      absl::MutexLock lock(&p.mutex);
      // Double check state, because another thread may have seen the same state
      // and done the synchronization before the current thread.
      if (p.state.load(std::memory_order_relaxed) == STATE_MODIFIED_REPEATED) {
        const_cast<MapFieldBase*>(this)->SyncMapWithRepeatedFieldNoLock();
        p.state.store(CLEAN, std::memory_order_release);
      }
    }
    ConstAccess();
  }
}
325
// Rebuilds the map from the repeated field's entry messages via reflection.
// Caller must hold the payload mutex (hence "NoLock"). Duplicate keys
// resolve to the last occurrence, since later entries overwrite earlier ones.
void MapFieldBase::SyncMapWithRepeatedFieldNoLock() {
  ClearMapNoSync();

  RepeatedPtrField<Message>& rep = payload().repeated_field;

  if (rep.empty()) return;

  // All entries share one descriptor; borrow reflection from the first.
  const Message* prototype = &rep[0];
  const Reflection* reflection = prototype->GetReflection();
  const Descriptor* descriptor = prototype->GetDescriptor();
  const FieldDescriptor* key_des = descriptor->map_key();
  const FieldDescriptor* val_des = descriptor->map_value();

  for (const Message& elem : rep) {
    // MapKey type will be set later.
    Reflection::ScratchSpace map_key_scratch_space;
    MapKey map_key;
    switch (key_des->cpp_type()) {
      case FieldDescriptor::CPPTYPE_STRING:
        map_key.SetStringValue(
            reflection->GetStringView(elem, key_des, map_key_scratch_space));
        break;
      case FieldDescriptor::CPPTYPE_INT64:
        map_key.SetInt64Value(reflection->GetInt64(elem, key_des));
        break;
      case FieldDescriptor::CPPTYPE_INT32:
        map_key.SetInt32Value(reflection->GetInt32(elem, key_des));
        break;
      case FieldDescriptor::CPPTYPE_UINT64:
        map_key.SetUInt64Value(reflection->GetUInt64(elem, key_des));
        break;
      case FieldDescriptor::CPPTYPE_UINT32:
        map_key.SetUInt32Value(reflection->GetUInt32(elem, key_des));
        break;
      case FieldDescriptor::CPPTYPE_BOOL:
        map_key.SetBoolValue(reflection->GetBool(elem, key_des));
        break;
      default:
        // Float/double/enum/message cannot be map keys.
        Unreachable();
    }

    // Insert (or find) the slot for this key, then overwrite its value.
    MapValueRef map_val;
    map_val.SetType(val_des->cpp_type());
    InsertOrLookupMapValueNoSync(map_key, &map_val);

    switch (val_des->cpp_type()) {
#define HANDLE_TYPE(CPPTYPE, METHOD)                                   \
  case FieldDescriptor::CPPTYPE_##CPPTYPE:                             \
    map_val.Set##METHOD##Value(reflection->Get##METHOD(elem, val_des)); \
    break;
      HANDLE_TYPE(INT32, Int32);
      HANDLE_TYPE(INT64, Int64);
      HANDLE_TYPE(UINT32, UInt32);
      HANDLE_TYPE(UINT64, UInt64);
      HANDLE_TYPE(DOUBLE, Double);
      HANDLE_TYPE(FLOAT, Float);
      HANDLE_TYPE(BOOL, Bool);
      HANDLE_TYPE(STRING, String);
#undef HANDLE_TYPE
      case FieldDescriptor::CPPTYPE_ENUM:
        map_val.SetEnumValue(reflection->GetEnumValue(elem, val_des));
        break;
      case FieldDescriptor::CPPTYPE_MESSAGE: {
        map_val.MutableMessageValue()->CopyFrom(
            reflection->GetMessage(elem, val_des));
        break;
      }
    }
  }
}
396
// Empties both representations (map and, if materialized, repeated field).
void MapFieldBase::Clear() {
  if (ReflectionPayload* p = maybe_payload()) {
    p->repeated_field.Clear();
  }

  ClearMapNoSync();
  // Data in map and repeated field are both empty, but we can't set status
  // CLEAN. Because clear is a generated API, we cannot invalidate previous
  // reference to map.
  SetMapDirty();
}
408
size() const409 int MapFieldBase::size() const { return GetMap().size(); }
410
// Syncing wrapper around the no-sync insert/lookup: brings the map up to
// date, marks it dirty (this is a mutation entry point), then delegates.
// Returns the delegate's result — presumably true when a new entry was
// inserted; confirm against InsertOrLookupMapValueNoSyncImpl.
bool MapFieldBase::InsertOrLookupMapValue(const MapKey& map_key,
                                          MapValueRef* val) {
  SyncMapWithRepeatedField();
  SetMapDirty();
  return InsertOrLookupMapValueNoSync(map_key, val);
}
417
418 // ------------------DynamicMapField------------------
// Heap-allocated DynamicMapField; `default_entry` supplies the map-entry
// prototype (descriptor + reflection) and is not owned.
DynamicMapField::DynamicMapField(const Message* default_entry)
    : DynamicMapField::TypeDefinedMapFieldBase(&kVTable),
      default_entry_(default_entry) {}
422
// Arena-allocated variant; same contract as the single-argument constructor.
DynamicMapField::DynamicMapField(const Message* default_entry, Arena* arena)
    : TypeDefinedMapFieldBase<MapKey, MapValueRef>(&kVTable, arena),
      default_entry_(default_entry) {}
426
// Compile-time vtable shared by all DynamicMapField instances, wiring the
// *Impl static functions below into the MapFieldBase dispatch mechanism.
constexpr DynamicMapField::VTable DynamicMapField::kVTable =
    MakeVTable<DynamicMapField>();
429
// Heap-destruction path only (arena instances never run this; see DCHECK).
DynamicMapField::~DynamicMapField() {
  ABSL_DCHECK_EQ(arena(), nullptr);
  // DynamicMapField owns map values. Need to delete them before clearing the
  // map.
  for (auto& kv : map_) {
    kv.second.DeleteData();
  }
  map_.clear();
}
439
// Clears the map without touching the repeated field. Values are heap-owned
// only when there is no arena, so they are deleted explicitly in that case;
// arena-allocated values are reclaimed by the arena.
void DynamicMapField::ClearMapNoSyncImpl(MapFieldBase& base) {
  auto& self = static_cast<DynamicMapField&>(base);
  if (self.arena() == nullptr) {
    for (auto& elem : self.map_) {
      elem.second.DeleteData();
    }
  }

  self.map_.clear();
}
450
// Allocates default-initialized storage for a map value of the declared
// value type (on this field's arena, or the heap when arena() is null) and
// points `map_val` at it.
void DynamicMapField::AllocateMapValue(MapValueRef* map_val) {
  const FieldDescriptor* val_des = default_entry_->GetDescriptor()->map_value();
  map_val->SetType(val_des->cpp_type());
  // Allocate memory for the MapValueRef, and initialize to
  // default value.
  switch (val_des->cpp_type()) {
#define HANDLE_TYPE(CPPTYPE, TYPE)                     \
  case FieldDescriptor::CPPTYPE_##CPPTYPE: {           \
    auto* value = Arena::Create<TYPE>(arena());        \
    map_val->SetValue(value);                          \
    break;                                             \
  }
    HANDLE_TYPE(INT32, int32_t);
    HANDLE_TYPE(INT64, int64_t);
    HANDLE_TYPE(UINT32, uint32_t);
    HANDLE_TYPE(UINT64, uint64_t);
    HANDLE_TYPE(DOUBLE, double);
    HANDLE_TYPE(FLOAT, float);
    HANDLE_TYPE(BOOL, bool);
    HANDLE_TYPE(STRING, std::string);
    HANDLE_TYPE(ENUM, int32_t);
#undef HANDLE_TYPE
    case FieldDescriptor::CPPTYPE_MESSAGE: {
      // Message values are created from the default entry's value prototype.
      const Message& message =
          default_entry_->GetReflection()->GetMessage(*default_entry_, val_des);
      Message* value = message.New(arena());
      map_val->SetValue(value);
      break;
    }
  }
}
482
// Inserts a freshly allocated default value for `map_key` if absent, or finds
// the existing one. `val` is pointed at the entry's value either way.
// Returns true iff a new entry was inserted.
bool DynamicMapField::InsertOrLookupMapValueNoSyncImpl(MapFieldBase& base,
                                                       const MapKey& map_key,
                                                       MapValueRef* val) {
  auto& self = static_cast<DynamicMapField&>(base);
  Map<MapKey, MapValueRef>::iterator iter = self.map_.find(map_key);
  if (iter == self.map_.end()) {
    MapValueRef& map_val = self.map_[map_key];
    self.AllocateMapValue(&map_val);
    val->CopyFrom(map_val);
    return true;
  }
  // map_key is already in the map. Make sure (*map)[map_key] is not called.
  // [] may reorder the map and iterators.
  val->CopyFrom(iter->second);
  return false;
}
499
MergeFromImpl(MapFieldBase & base,const MapFieldBase & other)500 void DynamicMapField::MergeFromImpl(MapFieldBase& base,
501 const MapFieldBase& other) {
502 auto& self = static_cast<DynamicMapField&>(base);
503 ABSL_DCHECK(self.IsMapValid() && other.IsMapValid());
504 Map<MapKey, MapValueRef>* map = self.MutableMap();
505 const DynamicMapField& other_field =
506 reinterpret_cast<const DynamicMapField&>(other);
507 for (Map<MapKey, MapValueRef>::const_iterator other_it =
508 other_field.map_.begin();
509 other_it != other_field.map_.end(); ++other_it) {
510 Map<MapKey, MapValueRef>::iterator iter = map->find(other_it->first);
511 MapValueRef* map_val;
512 if (iter == map->end()) {
513 map_val = &self.map_[other_it->first];
514 self.AllocateMapValue(map_val);
515 } else {
516 map_val = &iter->second;
517 }
518
519 // Copy map value
520 const FieldDescriptor* field_descriptor =
521 self.default_entry_->GetDescriptor()->map_value();
522 switch (field_descriptor->cpp_type()) {
523 case FieldDescriptor::CPPTYPE_INT32: {
524 map_val->SetInt32Value(other_it->second.GetInt32Value());
525 break;
526 }
527 case FieldDescriptor::CPPTYPE_INT64: {
528 map_val->SetInt64Value(other_it->second.GetInt64Value());
529 break;
530 }
531 case FieldDescriptor::CPPTYPE_UINT32: {
532 map_val->SetUInt32Value(other_it->second.GetUInt32Value());
533 break;
534 }
535 case FieldDescriptor::CPPTYPE_UINT64: {
536 map_val->SetUInt64Value(other_it->second.GetUInt64Value());
537 break;
538 }
539 case FieldDescriptor::CPPTYPE_FLOAT: {
540 map_val->SetFloatValue(other_it->second.GetFloatValue());
541 break;
542 }
543 case FieldDescriptor::CPPTYPE_DOUBLE: {
544 map_val->SetDoubleValue(other_it->second.GetDoubleValue());
545 break;
546 }
547 case FieldDescriptor::CPPTYPE_BOOL: {
548 map_val->SetBoolValue(other_it->second.GetBoolValue());
549 break;
550 }
551 case FieldDescriptor::CPPTYPE_STRING: {
552 map_val->SetStringValue(other_it->second.GetStringValue());
553 break;
554 }
555 case FieldDescriptor::CPPTYPE_ENUM: {
556 map_val->SetEnumValue(other_it->second.GetEnumValue());
557 break;
558 }
559 case FieldDescriptor::CPPTYPE_MESSAGE: {
560 map_val->MutableMessageValue()->CopyFrom(
561 other_it->second.GetMessageValue());
562 break;
563 }
564 }
565 }
566 }
567
// The map-entry prototype for a DynamicMapField is the non-owned default
// entry message supplied at construction.
const Message* DynamicMapField::GetPrototypeImpl(const MapFieldBase& map) {
  return static_cast<const DynamicMapField&>(map).default_entry_;
}
571
// Estimates heap usage: the repeated-field payload (if any) plus per-entry
// key/value storage. Scalar value sizes are derived from the first entry's
// type and multiplied by the entry count; message values are measured
// individually via reflection. Caller holds the payload mutex ("NoLock").
size_t DynamicMapField::SpaceUsedExcludingSelfNoLockImpl(
    const MapFieldBase& map) {
  auto& self = static_cast<const DynamicMapField&>(map);
  size_t size = 0;
  if (auto* p = self.maybe_payload()) {
    size += p->repeated_field.SpaceUsedExcludingSelfLong();
  }
  size_t map_size = self.map_.size();
  if (map_size) {
    Map<MapKey, MapValueRef>::const_iterator it = self.map_.begin();
    size += sizeof(it->first) * map_size;
    size += sizeof(it->second) * map_size;
    // If key is string, add the allocated space.
    if (it->first.type() == FieldDescriptor::CPPTYPE_STRING) {
      // NOTE(review): approximates each string key as sizeof(std::string);
      // actual character buffers are not counted.
      size += sizeof(std::string) * map_size;
    }
    // Add the allocated space in MapValueRef.
    switch (it->second.type()) {
#define HANDLE_TYPE(CPPTYPE, TYPE)           \
  case FieldDescriptor::CPPTYPE_##CPPTYPE: { \
    size += sizeof(TYPE) * map_size;         \
    break;                                   \
  }
      HANDLE_TYPE(INT32, int32_t);
      HANDLE_TYPE(INT64, int64_t);
      HANDLE_TYPE(UINT32, uint32_t);
      HANDLE_TYPE(UINT64, uint64_t);
      HANDLE_TYPE(DOUBLE, double);
      HANDLE_TYPE(FLOAT, float);
      HANDLE_TYPE(BOOL, bool);
      HANDLE_TYPE(STRING, std::string);
      HANDLE_TYPE(ENUM, int32_t);
#undef HANDLE_TYPE
      case FieldDescriptor::CPPTYPE_MESSAGE: {
        while (it != self.map_.end()) {
          const Message& message = it->second.GetMessageValue();
          size += message.GetReflection()->SpaceUsedLong(message);
          ++it;
        }
        break;
      }
    }
  }
  return size;
}
617
618 } // namespace internal
619 } // namespace protobuf
620 } // namespace google
621
622 #include "google/protobuf/port_undef.inc"
623