// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.

#include <google/protobuf/compiler/cpp/cpp_message.h>

#include <algorithm>
#include <cstdint>
#include <functional>
#include <map>
#include <memory>
#include <unordered_map>
#include <utility>
#include <vector>

#include <google/protobuf/stubs/common.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/map_entry_lite.h>
#include <google/protobuf/wire_format.h>
#include <google/protobuf/stubs/strutil.h>
#include <google/protobuf/stubs/substitute.h>
#include <google/protobuf/compiler/cpp/cpp_enum.h>
#include <google/protobuf/compiler/cpp/cpp_extension.h>
#include <google/protobuf/compiler/cpp/cpp_field.h>
#include <google/protobuf/compiler/cpp/cpp_helpers.h>
#include <google/protobuf/compiler/cpp/cpp_padding_optimizer.h>
#include <google/protobuf/compiler/cpp/cpp_parse_function_generator.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/stubs/hash.h>


// Must be included last.
#include <google/protobuf/port_def.inc>

namespace google {
namespace protobuf {
namespace compiler {
namespace cpp {

using internal::WireFormat;
using internal::WireFormatLite;

namespace {

static constexpr int kNoHasbit = -1;

// Create an expression that evaluates to
// "for all i, (_has_bits_[i] & masks[i]) == masks[i]"
// masks is allowed to be shorter than _has_bits_, but at least one element of
// masks must be non-zero.
std::string ConditionalToCheckBitmasks(
    const std::vector<uint32_t>& masks, bool return_success = true,
    StringPiece has_bits_var = "_has_bits_") {
  std::vector<std::string> parts;
  for (int i = 0; i < masks.size(); i++) {
    if (masks[i] == 0) continue;
    std::string m = StrCat("0x", strings::Hex(masks[i], strings::ZERO_PAD_8));
    // Each xor evaluates to 0 if the expected bits are present.
    parts.push_back(
        StrCat("((", has_bits_var, "[", i, "] & ", m, ") ^ ", m, ")"));
  }
  GOOGLE_CHECK(!parts.empty());
  // If we have multiple parts, each expected to be 0, then bitwise-or them.
  std::string result =
      parts.size() == 1
          ? parts[0]
          : StrCat("(", Join(parts, "\n | "), ")");
  return result + (return_success ? " == 0" : " != 0");
}
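// Illustration for ConditionalToCheckBitmasks() above (values made up): with
// masks = {0x0000000f, 0x00000000, 0x00000300} the returned expression is
// roughly
//   (((_has_bits_[0] & 0x0000000f) ^ 0x0000000f)
//    | ((_has_bits_[2] & 0x00000300) ^ 0x00000300)) == 0
// which is true only when every bit named in the masks is set.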

void PrintPresenceCheck(const Formatter& format, const FieldDescriptor* field,
                        const std::vector<int>& has_bit_indices,
                        io::Printer* printer, int* cached_has_word_index) {
  if (!field->options().weak()) {
    int has_bit_index = has_bit_indices[field->index()];
    if (*cached_has_word_index != (has_bit_index / 32)) {
      *cached_has_word_index = (has_bit_index / 32);
      format("cached_has_bits = $has_bits$[$1$];\n", *cached_has_word_index);
    }
    const std::string mask =
        StrCat(strings::Hex(1u << (has_bit_index % 32), strings::ZERO_PAD_8));
    format("if (cached_has_bits & 0x$1$u) {\n", mask);
  } else {
    format("if (has_$1$()) {\n", FieldName(field));
  }
  format.Indent();
}
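// Illustration for PrintPresenceCheck() above (index made up): for a non-weak
// field whose has-bit index is 33, the emitted code is along the lines of
//   cached_has_bits = $has_bits$[1];   // only when the cached word changes
//   if (cached_has_bits & 0x00000002u) {
// with $has_bits$ expanding to the message's has-bits member.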

struct FieldOrderingByNumber {
  inline bool operator()(const FieldDescriptor* a,
                         const FieldDescriptor* b) const {
    return a->number() < b->number();
  }
};

// Sorts the fields of the given Descriptor by number and returns them in a
// new vector.
std::vector<const FieldDescriptor*> SortFieldsByNumber(
    const Descriptor* descriptor) {
  std::vector<const FieldDescriptor*> fields(descriptor->field_count());
  for (int i = 0; i < descriptor->field_count(); i++) {
    fields[i] = descriptor->field(i);
  }
  std::sort(fields.begin(), fields.end(), FieldOrderingByNumber());
  return fields;
}

// Functor for sorting extension ranges by their "start" field number.
struct ExtensionRangeSorter {
  bool operator()(const Descriptor::ExtensionRange* left,
                  const Descriptor::ExtensionRange* right) const {
    return left->start < right->start;
  }
};

bool IsPOD(const FieldDescriptor* field) {
  if (field->is_repeated() || field->is_extension()) return false;
  switch (field->cpp_type()) {
    case FieldDescriptor::CPPTYPE_ENUM:
    case FieldDescriptor::CPPTYPE_INT32:
    case FieldDescriptor::CPPTYPE_INT64:
    case FieldDescriptor::CPPTYPE_UINT32:
    case FieldDescriptor::CPPTYPE_UINT64:
    case FieldDescriptor::CPPTYPE_FLOAT:
    case FieldDescriptor::CPPTYPE_DOUBLE:
    case FieldDescriptor::CPPTYPE_BOOL:
      return true;
    case FieldDescriptor::CPPTYPE_STRING:
      return false;
    default:
      return false;
  }
}

// Helper for the code that emits the SharedCtor() and InternalSwap() methods.
// Anything that is a POD or a "normal" message (represented by a pointer) can
// be manipulated as raw bytes.
bool CanBeManipulatedAsRawBytes(const FieldDescriptor* field,
                                const Options& options,
                                MessageSCCAnalyzer* scc_analyzer) {
  bool ret = CanInitializeByZeroing(field);

  // Non-repeated, non-lazy message fields are simply raw pointers, so we can
  // swap them or use memset to initialize these in SharedCtor. We cannot use
  // this in Clear, as we need to potentially delete the existing value.
  ret =
      ret || (!field->is_repeated() && !IsLazy(field, options, scc_analyzer) &&
              field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE);
  return ret;
}

bool StrContains(const std::string& haystack, const std::string& needle) {
  return haystack.find(needle) != std::string::npos;
}

// Finds runs of fields for which `predicate` is true.
// RunMap maps from fields that start each run to the number of fields in that
// run. This is optimized for the common case that there are very few runs in
// a message and that most of the eligible fields appear together.
using RunMap = std::unordered_map<const FieldDescriptor*, size_t>;
RunMap FindRuns(const std::vector<const FieldDescriptor*>& fields,
                const std::function<bool(const FieldDescriptor*)>& predicate) {
  RunMap runs;
  const FieldDescriptor* last_start = nullptr;

  for (auto field : fields) {
    if (predicate(field)) {
      if (last_start == nullptr) {
        last_start = field;
      }

      runs[last_start]++;
    } else {
      last_start = nullptr;
    }
  }
  return runs;
}
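// Illustration for FindRuns() above (fields made up): given fields f1..f5
// where the predicate holds for f1, f2, f4 and f5 but not f3, the result is
//   {f1 -> 2, f4 -> 2}
// i.e. each run is keyed by its first field and valued by its length.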

// Emits an if-statement with a condition that evaluates to true if |field| is
// considered non-default (will be sent over the wire), for message types
// without true field presence. Should only be called if
// !HasHasbit(field).
bool EmitFieldNonDefaultCondition(io::Printer* printer,
                                  const std::string& prefix,
                                  const FieldDescriptor* field) {
  GOOGLE_CHECK(!HasHasbit(field));
  Formatter format(printer);
  format.Set("prefix", prefix);
  format.Set("name", FieldName(field));
  // Merge and serialize semantics: primitive fields are merged/serialized only
  // if non-zero (numeric) or non-empty (string).
  if (!field->is_repeated() && !field->containing_oneof()) {
    if (field->cpp_type() == FieldDescriptor::CPPTYPE_STRING) {
      format("if (!$prefix$_internal_$name$().empty()) {\n");
    } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
      // Message fields still have has_$name$() methods.
      format("if ($prefix$_internal_has_$name$()) {\n");
    } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_FLOAT) {
      format(
          "static_assert(sizeof(uint32_t) == sizeof(float), \"Code assumes "
          "uint32_t and float are the same size.\");\n"
          "float tmp_$name$ = $prefix$_internal_$name$();\n"
          "uint32_t raw_$name$;\n"
          "memcpy(&raw_$name$, &tmp_$name$, sizeof(tmp_$name$));\n"
          "if (raw_$name$ != 0) {\n");
    } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_DOUBLE) {
      format(
          "static_assert(sizeof(uint64_t) == sizeof(double), \"Code assumes "
          "uint64_t and double are the same size.\");\n"
          "double tmp_$name$ = $prefix$_internal_$name$();\n"
          "uint64_t raw_$name$;\n"
          "memcpy(&raw_$name$, &tmp_$name$, sizeof(tmp_$name$));\n"
          "if (raw_$name$ != 0) {\n");
    } else {
      format("if ($prefix$_internal_$name$() != 0) {\n");
    }
    format.Indent();
    return true;
  } else if (field->real_containing_oneof()) {
    format("if (_internal_has_$name$()) {\n");
    format.Indent();
    return true;
  }
  return false;
}
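// Illustration for EmitFieldNonDefaultCondition() above (field made up): for
// a singular string field "name" with prefix "this->", it opens
//   if (!this->_internal_name().empty()) {
// indents, and returns true; the caller emits the matching closing brace.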

// Does the given field have a has_$name$() method?
bool HasHasMethod(const FieldDescriptor* field) {
  if (!IsProto3(field->file())) {
    // In proto1/proto2, every field has a has_$name$() method.
    return true;
  }
  // For message types without true field presence, only fields with a message
  // type or inside a oneof have a has_$name$() method.
  return field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE ||
         field->has_optional_keyword() || field->real_containing_oneof();
}

// Collects map entry message type information.
void CollectMapInfo(const Options& options, const Descriptor* descriptor,
                    std::map<std::string, std::string>* variables) {
  GOOGLE_CHECK(IsMapEntryMessage(descriptor));
  std::map<std::string, std::string>& vars = *variables;
  const FieldDescriptor* key = descriptor->map_key();
  const FieldDescriptor* val = descriptor->map_value();
  vars["key_cpp"] = PrimitiveTypeName(options, key->cpp_type());
  switch (val->cpp_type()) {
    case FieldDescriptor::CPPTYPE_MESSAGE:
      vars["val_cpp"] = FieldMessageTypeName(val, options);
      break;
    case FieldDescriptor::CPPTYPE_ENUM:
      vars["val_cpp"] = ClassName(val->enum_type(), true);
      break;
    default:
      vars["val_cpp"] = PrimitiveTypeName(options, val->cpp_type());
  }
  vars["key_wire_type"] =
      "TYPE_" + ToUpper(DeclaredTypeMethodName(key->type()));
  vars["val_wire_type"] =
      "TYPE_" + ToUpper(DeclaredTypeMethodName(val->type()));
}
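// Illustration for CollectMapInfo() above (entry made up): for a
// map<string, SomeMessage> field, "key_cpp" becomes the C++ string type,
// "val_cpp" the qualified name of SomeMessage, and the wire-type variables
// become "TYPE_STRING" and "TYPE_MESSAGE".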

// Does the given field have a private (internal helper only) has_$name$()
// method?
bool HasPrivateHasMethod(const FieldDescriptor* field) {
  // Only for oneofs in message types with no field presence. has_$name$(),
  // based on the oneof case, is still useful internally for generated code.
  return IsProto3(field->file()) && field->real_containing_oneof();
}

// TODO(ckennelly): Cull these exclusions if/when these protos do not have
// their methods overridden by subclasses.

bool ShouldMarkClassAsFinal(const Descriptor* descriptor,
                            const Options& options) {
  return true;
}


// Returns true if the message should be serialized in order, decided by the
// following factors in order of precedence:
// --options().message_set_wire_format() == true
// --the message is in the allowlist (true)
// --GOOGLE_PROTOBUF_SHUFFLE_SERIALIZE is defined (false)
// --a range of message names that are allowed to stay in order (true)
bool ShouldSerializeInOrder(const Descriptor* descriptor,
                            const Options& options) {
  return true;
}

bool IsCrossFileMapField(const FieldDescriptor* field) {
  if (!field->is_map()) {
    return false;
  }

  const Descriptor* d = field->message_type();
  const FieldDescriptor* value = d->FindFieldByNumber(2);

  return IsCrossFileMessage(value);
}

bool IsCrossFileMaybeMap(const FieldDescriptor* field) {
  if (IsCrossFileMapField(field)) {
    return true;
  }

  return IsCrossFileMessage(field);
}

bool IsRequired(const std::vector<const FieldDescriptor*>& v) {
  return v.front()->is_required();
}

bool HasSingularString(const Descriptor* desc, const Options& options) {
  for (const auto* field : FieldRange(desc)) {
    if (IsString(field, options) && !field->is_repeated() &&
        !field->real_containing_oneof()) {
      return true;
    }
  }
  return false;
}

// Collects neighboring fields based on a given criterion (equivalence
// predicate).
template <typename Predicate>
std::vector<std::vector<const FieldDescriptor*>> CollectFields(
    const std::vector<const FieldDescriptor*>& fields,
    const Predicate& equivalent) {
  std::vector<std::vector<const FieldDescriptor*>> chunks;
  for (auto field : fields) {
    if (chunks.empty() || !equivalent(chunks.back().back(), field)) {
      chunks.emplace_back();
    }
    chunks.back().push_back(field);
  }
  return chunks;
}
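// Illustration for CollectFields() above (fields made up): with an
// "equivalent" predicate that compares has-bit words, fields with word
// indices {0, 0, 1, 1} are grouped into two chunks of two fields each.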

// Returns a bit mask based on the has_bit indices of "fields", which are
// expected to be in the same chunk. It is used in a group presence check
// where _has_bits_ is masked to tell if anything in "fields" is present.
uint32_t GenChunkMask(const std::vector<const FieldDescriptor*>& fields,
                      const std::vector<int>& has_bit_indices) {
  GOOGLE_CHECK(!fields.empty());
  int first_index_offset = has_bit_indices[fields.front()->index()] / 32;
  uint32_t chunk_mask = 0;
  for (auto field : fields) {
    // "index" defines where in the _has_bits_ the field appears.
    int index = has_bit_indices[field->index()];
    GOOGLE_CHECK_EQ(first_index_offset, index / 32);
    chunk_mask |= static_cast<uint32_t>(1) << (index % 32);
  }
  GOOGLE_CHECK_NE(0, chunk_mask);
  return chunk_mask;
}
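// Illustration for GenChunkMask() above (indices made up): fields with
// has_bit indices {2, 3, 5}, all in word 0, yield
//   (1 << 2) | (1 << 3) | (1 << 5) == 0x0000002c.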

// Return the number of bits set in n, a non-negative integer.
static int popcnt(uint32_t n) {
  int result = 0;
  while (n != 0) {
    result += (n & 1);
    n = n / 2;
  }
  return result;
}
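// E.g. popcnt(0x0000002c) == 3. The loop above is a portable equivalent of a
// hardware popcount instruction.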

// For a run of cold chunks, opens and closes an external if statement that
// checks multiple has_bits words to skip the bulk of cold fields.
class ColdChunkSkipper {
 public:
  ColdChunkSkipper(
      const Options& options,
      const std::vector<std::vector<const FieldDescriptor*>>& chunks,
      const std::vector<int>& has_bit_indices, const double cold_threshold)
      : chunks_(chunks),
        has_bit_indices_(has_bit_indices),
        access_info_map_(options.access_info_map),
        cold_threshold_(cold_threshold) {
    SetCommonVars(options, &variables_);
    SetCommonMessageDataVariables(&variables_);
  }

  // May open an external if check for a batch of cold fields. "from" is the
  // prefix to _has_bits_ to allow MergeFrom to use "from._has_bits_".
  // Otherwise, it should be "".
  void OnStartChunk(int chunk, int cached_has_word_index,
                    const std::string& from, io::Printer* printer);
  bool OnEndChunk(int chunk, io::Printer* printer);

 private:
  bool IsColdChunk(int chunk);

  int HasbitWord(int chunk, int offset) {
    return has_bit_indices_[chunks_[chunk][offset]->index()] / 32;
  }

  const std::vector<std::vector<const FieldDescriptor*>>& chunks_;
  const std::vector<int>& has_bit_indices_;
  const AccessInfoMap* access_info_map_;
  const double cold_threshold_;
  std::map<std::string, std::string> variables_;
  int limit_chunk_ = -1;
};

// Tuning parameters for ColdChunkSkipper.
const double kColdRatio = 0.005;

bool ColdChunkSkipper::IsColdChunk(int chunk) {
  // Mark this variable as used until it is actually used
  (void)cold_threshold_;
  return false;
}


void ColdChunkSkipper::OnStartChunk(int chunk, int cached_has_word_index,
                                    const std::string& from,
                                    io::Printer* printer) {
  Formatter format(printer, variables_);
  if (!access_info_map_) {
    return;
  } else if (chunk < limit_chunk_) {
    // We are already inside a run of cold chunks.
    return;
  } else if (!IsColdChunk(chunk)) {
    // We can't start a run of cold chunks.
    return;
  }

  // Find the end of consecutive cold chunks.
  limit_chunk_ = chunk;
  while (limit_chunk_ < chunks_.size() && IsColdChunk(limit_chunk_)) {
    limit_chunk_++;
  }

  if (limit_chunk_ <= chunk + 1) {
    // Require at least two chunks to emit external has_bit checks.
    limit_chunk_ = -1;
    return;
  }

  // Emit has_bit check for each has_bit_dword index.
  format("if (PROTOBUF_PREDICT_FALSE(");
  int first_word = HasbitWord(chunk, 0);
  while (chunk < limit_chunk_) {
    uint32_t mask = 0;
    int this_word = HasbitWord(chunk, 0);
    // Generate mask for chunks on the same word.
    for (; chunk < limit_chunk_ && HasbitWord(chunk, 0) == this_word; chunk++) {
      for (auto field : chunks_[chunk]) {
        int hasbit_index = has_bit_indices_[field->index()];
        // Fields on a chunk must be in the same word.
        GOOGLE_CHECK_EQ(this_word, hasbit_index / 32);
        mask |= 1 << (hasbit_index % 32);
      }
    }

    if (this_word != first_word) {
      format(" ||\n ");
    }
    format.Set("mask", strings::Hex(mask, strings::ZERO_PAD_8));
    if (this_word == cached_has_word_index) {
      format("(cached_has_bits & 0x$mask$u) != 0");
    } else {
      format("($1$_has_bits_[$2$] & 0x$mask$u) != 0", from, this_word);
    }
  }
  format(")) {\n");
  format.Indent();
}
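// Illustration for OnStartChunk() above (masks made up): for a run of cold
// chunks spanning has-bit words 0 and 1, the emitted guard looks roughly like
//   if (PROTOBUF_PREDICT_FALSE(
//       (cached_has_bits & 0x000000f0u) != 0 ||
//       (_has_bits_[1] & 0x0000000fu) != 0)) {
// and OnEndChunk() below emits the matching closing brace after the last
// cold chunk.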

bool ColdChunkSkipper::OnEndChunk(int chunk, io::Printer* printer) {
  Formatter format(printer, variables_);
  if (chunk != limit_chunk_ - 1) {
    return false;
  }
  format.Outdent();
  format("}\n");
  return true;
}

void MaySetAnnotationVariable(const Options& options,
                              StringPiece annotation_name,
                              StringPiece injector_template_prefix,
                              StringPiece injector_template_suffix,
                              std::map<std::string, std::string>* variables) {
  if (options.field_listener_options.forbidden_field_listener_events.count(
          std::string(annotation_name)))
    return;
  (*variables)[StrCat("annotate_", annotation_name)] = strings::Substitute(
      StrCat(injector_template_prefix, injector_template_suffix),
      (*variables)["classtype"]);
}

void GenerateExtensionAnnotations(
    const Descriptor* descriptor, const Options& options,
    std::map<std::string, std::string>* variables) {
  const std::map<std::string, std::string> accessor_annotations_to_hooks = {
      {"annotate_extension_has", "OnHasExtension"},
      {"annotate_extension_clear", "OnClearExtension"},
      {"annotate_extension_repeated_size", "OnExtensionSize"},
      {"annotate_extension_get", "OnGetExtension"},
      {"annotate_extension_mutable", "OnMutableExtension"},
      {"annotate_extension_set", "OnSetExtension"},
      {"annotate_extension_release", "OnReleaseExtension"},
      {"annotate_repeated_extension_get", "OnGetExtension"},
      {"annotate_repeated_extension_mutable", "OnMutableExtension"},
      {"annotate_repeated_extension_set", "OnSetExtension"},
      {"annotate_repeated_extension_add", "OnAddExtension"},
      {"annotate_repeated_extension_add_mutable", "OnAddMutableExtension"},
      {"annotate_repeated_extension_list", "OnListExtension"},
      {"annotate_repeated_extension_list_mutable", "OnMutableListExtension"},
  };
  for (const auto& annotation : accessor_annotations_to_hooks) {
    (*variables)[annotation.first] = "";
  }
  if (!options.field_listener_options.inject_field_listener_events ||
      descriptor->file()->options().optimize_for() ==
          google::protobuf::FileOptions::LITE_RUNTIME) {
    return;
  }
  StringPiece tracker = (*variables)["tracker"];
  StringPiece extensions = (*variables)["extensions"];
  for (const auto& annotation : accessor_annotations_to_hooks) {
    const std::string& annotation_name = annotation.first;
    const std::string& listener_call = annotation.second;
    if (!StrContains(annotation_name, "repeated") &&
        !StrContains(annotation_name, "size") &&
        !StrContains(annotation_name, "clear")) {
      // Primitive field accessors.
      // "Has" is here because calling "has" on a repeated field is a mistake.
      (*variables)[annotation_name] = StrCat(
          " ", tracker, ".", listener_call,
          "(this, id.number(), _proto_TypeTraits::GetPtr(id.number(), ",
          extensions, ", id.default_value_ref()));");
    } else if (StrContains(annotation_name, "repeated") &&
               !StrContains(annotation_name, "list") &&
               !StrContains(annotation_name, "size")) {
      // Repeated index accessors.
      std::string str_index = "index";
      if (StrContains(annotation_name, "add")) {
        str_index = StrCat(extensions, ".ExtensionSize(id.number()) - 1");
      }
      (*variables)[annotation_name] =
          StrCat(" ", tracker, ".", listener_call,
                 "(this, id.number(), "
                 "_proto_TypeTraits::GetPtr(id.number(), ",
                 extensions, ", ", str_index, "));");
    } else if (StrContains(annotation_name, "list") ||
               StrContains(annotation_name, "size")) {
      // Repeated full accessors.
      (*variables)[annotation_name] = StrCat(
          " ", tracker, ".", listener_call,
          "(this, id.number(), _proto_TypeTraits::GetRepeatedPtr(id.number(), ",
          extensions, "));");
    } else {
      // Generic accessors such as "clear".
      // TODO(b/190614678): Generalize clear for both repeated and non-repeated
      // calls; currently their underlying memory interfaces are very different.
      // Or think of removing the clear callback, as no usages are needed and
      // no memory exists after calling clear().
    }
  }
}

}  // anonymous namespace

// ===================================================================

MessageGenerator::MessageGenerator(
    const Descriptor* descriptor,
    const std::map<std::string, std::string>& vars, int index_in_file_messages,
    const Options& options, MessageSCCAnalyzer* scc_analyzer)
    : descriptor_(descriptor),
      index_in_file_messages_(index_in_file_messages),
      classname_(ClassName(descriptor, false)),
      options_(options),
      field_generators_(descriptor, options, scc_analyzer),
      max_has_bit_index_(0),
      max_inlined_string_index_(0),
      num_weak_fields_(0),
      scc_analyzer_(scc_analyzer),
      variables_(vars) {
  if (!message_layout_helper_) {
    message_layout_helper_.reset(new PaddingOptimizer());
  }
  SetCommonMessageDataVariables(&variables_);

  // Variables that apply to this class
  variables_["classname"] = classname_;
  variables_["classtype"] = QualifiedClassName(descriptor_, options);
  variables_["full_name"] = descriptor_->full_name();
  variables_["superclass"] = SuperClassName(descriptor_, options_);
  variables_["annotate_serialize"] = "";
  variables_["annotate_deserialize"] = "";
  variables_["annotate_reflection"] = "";
  variables_["annotate_bytesize"] = "";
  variables_["annotate_mergefrom"] = "";

  if (options.field_listener_options.inject_field_listener_events &&
      descriptor->file()->options().optimize_for() !=
          google::protobuf::FileOptions::LITE_RUNTIME) {
    const std::string injector_template =
        StrCat(" ", variables_["tracker"], ".");

    MaySetAnnotationVariable(options, "serialize", injector_template,
                             "OnSerialize(this);\n", &variables_);
    MaySetAnnotationVariable(options, "deserialize", injector_template,
                             "OnDeserialize(this);\n", &variables_);
    // TODO(danilak): Ideally annotate_reflection should not exist, and we
    // would annotate all reflective calls on our own. However, as this is a
    // cause of side effects, i.e. reading values dynamically, we want users
    // to know that dynamic access can happen.
    MaySetAnnotationVariable(options, "reflection", injector_template,
                             "OnGetMetadata();\n", &variables_);
    MaySetAnnotationVariable(options, "bytesize", injector_template,
                             "OnByteSize(this);\n", &variables_);
    MaySetAnnotationVariable(options, "mergefrom", injector_template,
                             "OnMergeFrom(this, &from);\n", &variables_);
  }

  GenerateExtensionAnnotations(descriptor_, options_, &variables_);

  SetUnknownFieldsVariable(descriptor_, options_, &variables_);

  // Compute optimized field order to be used for layout and initialization
  // purposes.
  for (auto field : FieldRange(descriptor_)) {
    if (IsFieldStripped(field, options_)) {
      continue;
    }

    if (IsWeak(field, options_)) {
      num_weak_fields_++;
    } else if (!field->real_containing_oneof()) {
      optimized_order_.push_back(field);
    }
  }

  message_layout_helper_->OptimizeLayout(&optimized_order_, options_,
                                         scc_analyzer_);

  // This message has hasbits iff one or more fields need one.
  for (auto field : optimized_order_) {
    if (HasHasbit(field)) {
      if (has_bit_indices_.empty()) {
        has_bit_indices_.resize(descriptor_->field_count(), kNoHasbit);
      }
      has_bit_indices_[field->index()] = max_has_bit_index_++;
    }
    if (IsStringInlined(field, options_)) {
      if (inlined_string_indices_.empty()) {
        inlined_string_indices_.resize(descriptor_->field_count(), kNoHasbit);
        // The bitset[0] is for arena dtor tracking. Donating states start from
        // bitset[1].
        max_inlined_string_index_++;
      }
      inlined_string_indices_[field->index()] = max_inlined_string_index_++;
    }
  }

  if (!has_bit_indices_.empty()) {
    field_generators_.SetHasBitIndices(has_bit_indices_);
  }

  if (!inlined_string_indices_.empty()) {
    field_generators_.SetInlinedStringIndices(inlined_string_indices_);
  }

  num_required_fields_ = 0;
  for (int i = 0; i < descriptor->field_count(); i++) {
    if (descriptor->field(i)->is_required()) {
      ++num_required_fields_;
    }
  }

  parse_function_generator_.reset(new ParseFunctionGenerator(
      descriptor_, max_has_bit_index_, has_bit_indices_,
      inlined_string_indices_, options_, scc_analyzer_, variables_));
}

MessageGenerator::~MessageGenerator() = default;

size_t MessageGenerator::HasBitsSize() const {
  return (max_has_bit_index_ + 31) / 32;
}
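// E.g. a message tracking 33 has-bits needs (33 + 31) / 32 == 2 uint32_t
// words; InlinedStringDonatedSize() below applies the same rounding.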

size_t MessageGenerator::InlinedStringDonatedSize() const {
  return (max_inlined_string_index_ + 31) / 32;
}

int MessageGenerator::HasBitIndex(const FieldDescriptor* field) const {
  return has_bit_indices_.empty() ? kNoHasbit
                                  : has_bit_indices_[field->index()];
}

int MessageGenerator::HasByteIndex(const FieldDescriptor* field) const {
  int hasbit = HasBitIndex(field);
  return hasbit == kNoHasbit ? kNoHasbit : hasbit / 8;
}

int MessageGenerator::HasWordIndex(const FieldDescriptor* field) const {
  int hasbit = HasBitIndex(field);
  return hasbit == kNoHasbit ? kNoHasbit : hasbit / 32;
}

void MessageGenerator::AddGenerators(
    std::vector<std::unique_ptr<EnumGenerator>>* enum_generators,
    std::vector<std::unique_ptr<ExtensionGenerator>>* extension_generators) {
  for (int i = 0; i < descriptor_->enum_type_count(); i++) {
    enum_generators->emplace_back(
        new EnumGenerator(descriptor_->enum_type(i), variables_, options_));
    enum_generators_.push_back(enum_generators->back().get());
  }
  for (int i = 0; i < descriptor_->extension_count(); i++) {
    extension_generators->emplace_back(new ExtensionGenerator(
        descriptor_->extension(i), options_, scc_analyzer_));
    extension_generators_.push_back(extension_generators->back().get());
  }
}

void MessageGenerator::GenerateFieldAccessorDeclarations(io::Printer* printer) {
  Formatter format(printer, variables_);
  // optimized_order_ does not contain fields where
  // field->real_containing_oneof()
  // so we need to iterate over those as well.
  //
  // We place the non-oneof fields in optimized_order_, as that controls the
  // order of the _has_bits_ entries and we want GDB's pretty printers to be
  // able to infer these indices from the k[FIELDNAME]FieldNumber order.
  std::vector<const FieldDescriptor*> ordered_fields;
  ordered_fields.reserve(descriptor_->field_count());

  ordered_fields.insert(ordered_fields.begin(), optimized_order_.begin(),
                        optimized_order_.end());
  for (auto field : FieldRange(descriptor_)) {
    if (!field->real_containing_oneof() && !field->options().weak() &&
        !IsFieldStripped(field, options_)) {
      continue;
    }
    ordered_fields.push_back(field);
  }

  if (!ordered_fields.empty()) {
    format("enum : int {\n");
    for (auto field : ordered_fields) {
      Formatter::SaveState save(&format);

      std::map<std::string, std::string> vars;
      SetCommonFieldVariables(field, &vars, options_);
      format.AddMap(vars);
      format(" ${1$$2$$}$ = $number$,\n", field, FieldConstantName(field));
    }
    format("};\n");
  }
  for (auto field : ordered_fields) {
    PrintFieldComment(format, field);

    Formatter::SaveState save(&format);

    std::map<std::string, std::string> vars;
    SetCommonFieldVariables(field, &vars, options_);
    format.AddMap(vars);

    if (field->is_repeated()) {
      format("$deprecated_attr$int ${1$$name$_size$}$() const$2$\n", field,
             !IsFieldStripped(field, options_) ? ";" : " {__builtin_trap();}");
      if (!IsFieldStripped(field, options_)) {
        format(
            "private:\n"
            "int ${1$_internal_$name$_size$}$() const;\n"
            "public:\n",
            field);
      }
    } else if (HasHasMethod(field)) {
      format("$deprecated_attr$bool ${1$has_$name$$}$() const$2$\n", field,
             !IsFieldStripped(field, options_) ? ";" : " {__builtin_trap();}");
      if (!IsFieldStripped(field, options_)) {
        format(
            "private:\n"
            "bool _internal_has_$name$() const;\n"
            "public:\n");
      }
    } else if (HasPrivateHasMethod(field)) {
      if (!IsFieldStripped(field, options_)) {
        format(
            "private:\n"
            "bool ${1$_internal_has_$name$$}$() const;\n"
            "public:\n",
            field);
      }
    }
    format("$deprecated_attr$void ${1$clear_$name$$}$()$2$\n", field,
           !IsFieldStripped(field, options_) ? ";" : "{__builtin_trap();}");

    // Generate type-specific accessor declarations.
    field_generators_.get(field).GenerateAccessorDeclarations(printer);

    format("\n");
  }

  if (descriptor_->extension_range_count() > 0) {
    // Generate accessors for extensions.
    // We use "_proto_TypeTraits" as a type name below because "TypeTraits"
    // causes problems if the class has a nested message or enum type with that
    // name and "_TypeTraits" is technically reserved for the C++ library since
    // it starts with an underscore followed by a capital letter.
    //
    // For a similar reason, we use "_field_type" and "_is_packed" as parameter
    // names below, so that "field_type" and "is_packed" can be used as field
    // names.
    format(R"(
template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline bool HasExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) const {
  $annotate_extension_has$
  return $extensions$.Has(id.number());
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void ClearExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  $extensions$.ClearExtension(id.number());
  $annotate_extension_clear$
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline int ExtensionSize(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) const {
  $annotate_extension_repeated_size$
  return $extensions$.ExtensionSize(id.number());
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Singular::ConstType GetExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) const {
  $annotate_extension_get$
  return _proto_TypeTraits::Get(id.number(), $extensions$,
                                id.default_value());
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Singular::MutableType MutableExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  $annotate_extension_mutable$
  return _proto_TypeTraits::Mutable(id.number(), _field_type,
                                    &$extensions$);
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void SetExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    typename _proto_TypeTraits::Singular::ConstType value) {
  _proto_TypeTraits::Set(id.number(), _field_type, value, &$extensions$);
  $annotate_extension_set$
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void SetAllocatedExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    typename _proto_TypeTraits::Singular::MutableType value) {
  _proto_TypeTraits::SetAllocated(id.number(), _field_type, value,
                                  &$extensions$);
  $annotate_extension_set$
}
template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void UnsafeArenaSetAllocatedExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    typename _proto_TypeTraits::Singular::MutableType value) {
  _proto_TypeTraits::UnsafeArenaSetAllocated(id.number(), _field_type,
                                             value, &$extensions$);
  $annotate_extension_set$
}
template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
PROTOBUF_NODISCARD inline
    typename _proto_TypeTraits::Singular::MutableType
    ReleaseExtension(
        const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
            $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  $annotate_extension_release$
  return _proto_TypeTraits::Release(id.number(), _field_type,
                                    &$extensions$);
}
template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Singular::MutableType
UnsafeArenaReleaseExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  $annotate_extension_release$
  return _proto_TypeTraits::UnsafeArenaRelease(id.number(), _field_type,
                                               &$extensions$);
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Repeated::ConstType GetExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    int index) const {
  $annotate_repeated_extension_get$
  return _proto_TypeTraits::Get(id.number(), $extensions$, index);
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Repeated::MutableType MutableExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    int index) {
  $annotate_repeated_extension_mutable$
  return _proto_TypeTraits::Mutable(id.number(), index, &$extensions$);
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void SetExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    int index, typename _proto_TypeTraits::Repeated::ConstType value) {
  _proto_TypeTraits::Set(id.number(), index, value, &$extensions$);
  $annotate_repeated_extension_set$
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Repeated::MutableType AddExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  typename _proto_TypeTraits::Repeated::MutableType to_add =
      _proto_TypeTraits::Add(id.number(), _field_type, &$extensions$);
  $annotate_repeated_extension_add_mutable$
  return to_add;
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline void AddExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id,
    typename _proto_TypeTraits::Repeated::ConstType value) {
  _proto_TypeTraits::Add(id.number(), _field_type, _is_packed, value,
                         &$extensions$);
  $annotate_repeated_extension_add$
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline const typename _proto_TypeTraits::Repeated::RepeatedFieldType&
GetRepeatedExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) const {
  $annotate_repeated_extension_list$
  return _proto_TypeTraits::GetRepeated(id.number(), $extensions$);
}

template <typename _proto_TypeTraits,
          ::PROTOBUF_NAMESPACE_ID::internal::FieldType _field_type,
          bool _is_packed>
inline typename _proto_TypeTraits::Repeated::RepeatedFieldType*
MutableRepeatedExtension(
    const ::PROTOBUF_NAMESPACE_ID::internal::ExtensionIdentifier<
        $classname$, _proto_TypeTraits, _field_type, _is_packed>& id) {
  $annotate_repeated_extension_list_mutable$
  return _proto_TypeTraits::MutableRepeated(id.number(), _field_type,
                                            _is_packed, &$extensions$);
}

)");
    // Generate MessageSet specific APIs for proto2 MessageSet.
    // For testing purposes we don't check for bridge.MessageSet, so
    // we don't use IsProto2MessageSet
    if (descriptor_->options().message_set_wire_format() &&
        !options_.opensource_runtime && !options_.lite_implicit_weak_fields) {
      // Special-case MessageSet
      format("GOOGLE_PROTOBUF_EXTENSION_MESSAGE_SET_ACCESSORS($classname$)\n");
    }
  }

  for (auto oneof : OneOfRange(descriptor_)) {
    Formatter::SaveState saver(&format);
    format.Set("oneof_name", oneof->name());
    format.Set("camel_oneof_name", UnderscoresToCamelCase(oneof->name(), true));
    format(
        "void ${1$clear_$oneof_name$$}$();\n"
        "$camel_oneof_name$Case $oneof_name$_case() const;\n",
        oneof);
  }
}

void MessageGenerator::GenerateSingularFieldHasBits(
    const FieldDescriptor* field, Formatter format) {
  if (IsFieldStripped(field, options_)) {
    format(
        "inline bool $classname$::has_$name$() const { "
        "__builtin_trap(); }\n");
    return;
  }
  if (field->options().weak()) {
    format(
        "inline bool $classname$::has_$name$() const {\n"
        "$annotate_has$"
        " return $weak_field_map$.Has($number$);\n"
        "}\n");
    return;
  }
  if (HasHasbit(field)) {
    int has_bit_index = HasBitIndex(field);
    GOOGLE_CHECK_NE(has_bit_index, kNoHasbit);

    format.Set("has_array_index", has_bit_index / 32);
    format.Set("has_mask",
               strings::Hex(1u << (has_bit_index % 32), strings::ZERO_PAD_8));
    format(
        "inline bool $classname$::_internal_has_$name$() const {\n"
        " bool value = "
        "($has_bits$[$has_array_index$] & 0x$has_mask$u) != 0;\n");

    if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE &&
        !IsLazy(field, options_, scc_analyzer_)) {
      // We maintain the invariant that for a submessage x, has_x() returning
      // true implies that x_ is not null. By giving this information to the
      // compiler, we allow it to eliminate unnecessary null checks later on.
      format(" PROTOBUF_ASSUME(!value || $field$ != nullptr);\n");
    }

    format(
        " return value;\n"
        "}\n"
        "inline bool $classname$::has_$name$() const {\n"
        "$annotate_has$"
        " return _internal_has_$name$();\n"
        "}\n");
  } else if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
    // Message fields have a has_$name$() method.
    if (IsLazy(field, options_, scc_analyzer_)) {
      format(
          "inline bool $classname$::_internal_has_$name$() const {\n"
          " return !$field$.IsCleared();\n"
          "}\n");
    } else {
      format(
          "inline bool $classname$::_internal_has_$name$() const {\n"
          " return this != internal_default_instance() "
          "&& $field$ != nullptr;\n"
          "}\n");
    }
    format(
        "inline bool $classname$::has_$name$() const {\n"
        "$annotate_has$"
        " return _internal_has_$name$();\n"
        "}\n");
  }
}

void MessageGenerator::GenerateOneofHasBits(io::Printer* printer) {
  Formatter format(printer, variables_);
  for (auto oneof : OneOfRange(descriptor_)) {
    format.Set("oneof_name", oneof->name());
    format.Set("oneof_index", oneof->index());
    format.Set("cap_oneof_name", ToUpper(oneof->name()));
    format(
        "inline bool $classname$::has_$oneof_name$() const {\n"
        " return $oneof_name$_case() != $cap_oneof_name$_NOT_SET;\n"
        "}\n"
        "inline void $classname$::clear_has_$oneof_name$() {\n"
        " $oneof_case$[$oneof_index$] = $cap_oneof_name$_NOT_SET;\n"
        "}\n");
  }
}

void MessageGenerator::GenerateOneofMemberHasBits(const FieldDescriptor* field,
                                                  const Formatter& format) {
  if (IsFieldStripped(field, options_)) {
    if (HasHasMethod(field)) {
      format(
          "inline bool $classname$::has_$name$() const { "
          "__builtin_trap(); }\n");
    }
    format(
        "inline void $classname$::set_has_$name$() { __builtin_trap(); "
        "}\n");
    return;
  }
  // Singular field in a oneof
  // N.B.: Without field presence, we do not use has-bits or generate
  // has_$name$() methods, but oneofs still have set_has_$name$().
  // Oneofs also have has_$name$() but only as a private helper
  // method, so that generated code is slightly cleaner (vs. comparing
  // _oneof_case_[index] against a constant everywhere).
  //
  // If has_$name$() is private, there is no need to add an internal accessor.
  // Only annotate public accessors.
  if (HasHasMethod(field)) {
    format(
        "inline bool $classname$::_internal_has_$name$() const {\n"
        " return $oneof_name$_case() == k$field_name$;\n"
        "}\n"
        "inline bool $classname$::has_$name$() const {\n"
        "$annotate_has$"
        " return _internal_has_$name$();\n"
        "}\n");
  } else if (HasPrivateHasMethod(field)) {
    format(
        "inline bool $classname$::_internal_has_$name$() const {\n"
        " return $oneof_name$_case() == k$field_name$;\n"
        "}\n");
  }
  // set_has_$name$() for oneof fields is always private; hence should not be
  // annotated.
  format(
      "inline void $classname$::set_has_$name$() {\n"
      " $oneof_case$[$oneof_index$] = k$field_name$;\n"
      "}\n");
}

void MessageGenerator::GenerateFieldClear(const FieldDescriptor* field,
                                          bool is_inline, Formatter format) {
  if (IsFieldStripped(field, options_)) {
    format("void $classname$::clear_$name$() { __builtin_trap(); }\n");
    return;
  }

  // Generate clear_$name$().
  if (is_inline) {
    format("inline ");
  }
  format("void $classname$::clear_$name$() {\n");

  format.Indent();

  if (field->real_containing_oneof()) {
    // Clear this field only if it is the active field in this oneof,
    // otherwise ignore it.
    format("if (_internal_has_$name$()) {\n");
    format.Indent();
    field_generators_.get(field).GenerateClearingCode(format.printer());
    format("clear_has_$oneof_name$();\n");
    format.Outdent();
    format("}\n");
  } else {
    field_generators_.get(field).GenerateClearingCode(format.printer());
    if (HasHasbit(field)) {
      int has_bit_index = HasBitIndex(field);
      format.Set("has_array_index", has_bit_index / 32);
      format.Set("has_mask",
                 strings::Hex(1u << (has_bit_index % 32), strings::ZERO_PAD_8));
      format("$has_bits$[$has_array_index$] &= ~0x$has_mask$u;\n");
    }
  }
  format("$annotate_clear$");
  format.Outdent();
  format("}\n");
}
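// Illustration for GenerateFieldClear() above (names made up): for a singular
// numeric field "count" with has-bit index 0 on a message "Msg", the emitted
// code is roughly
//   inline void Msg::clear_count() {
//     // field-specific line(s) from GenerateClearingCode(), e.g. count_ = 0;
//     _has_bits_[0] &= ~0x00000001u;
//   }
// with $has_bits$ and $annotate_clear$ expanded from the variable map.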

void MessageGenerator::GenerateFieldAccessorDefinitions(io::Printer* printer) {
  Formatter format(printer, variables_);
  format("// $classname$\n\n");

  for (auto field : FieldRange(descriptor_)) {
    PrintFieldComment(format, field);

    if (IsFieldStripped(field, options_)) {
      continue;
    }

    std::map<std::string, std::string> vars;
    SetCommonFieldVariables(field, &vars, options_);

    Formatter::SaveState saver(&format);
    format.AddMap(vars);

    // Generate has_$name$() or $name$_size().
    if (field->is_repeated()) {
      if (IsFieldStripped(field, options_)) {
        format(
            "inline int $classname$::$name$_size() const { "
            "__builtin_trap(); }\n");
      } else {
        format(
            "inline int $classname$::_internal_$name$_size() const {\n"
            " return $1$$2$.size();\n"
            "}\n"
            "inline int $classname$::$name$_size() const {\n"
            "$annotate_size$"
            " return _internal_$name$_size();\n"
            "}\n",
            FieldMemberName(field),
            IsImplicitWeakField(field, options_, scc_analyzer_) &&
                    field->message_type()
                ? ".weak"
                : "");
      }
    } else if (field->real_containing_oneof()) {
      format.Set("field_name", UnderscoresToCamelCase(field->name(), true));
      format.Set("oneof_name", field->containing_oneof()->name());
      format.Set("oneof_index",
                 StrCat(field->containing_oneof()->index()));
      GenerateOneofMemberHasBits(field, format);
    } else {
      // Singular field.
      GenerateSingularFieldHasBits(field, format);
    }

    if (!IsCrossFileMaybeMap(field)) {
      GenerateFieldClear(field, true, format);
    }

    // Generate type-specific accessors.
    if (!IsFieldStripped(field, options_)) {
      field_generators_.get(field).GenerateInlineAccessorDefinitions(printer);
    }

    format("\n");
  }

  // Generate has_$name$() and clear_has_$name$() functions for oneofs.
  GenerateOneofHasBits(printer);
}
1286
GenerateClassDefinition(io::Printer * printer)1287 void MessageGenerator::GenerateClassDefinition(io::Printer* printer) {
1288 Formatter format(printer, variables_);
1289 format.Set("class_final",
1290 ShouldMarkClassAsFinal(descriptor_, options_) ? "final" : "");
1291
1292 if (IsMapEntryMessage(descriptor_)) {
1293 std::map<std::string, std::string> vars;
1294 CollectMapInfo(options_, descriptor_, &vars);
1295 vars["lite"] =
1296 HasDescriptorMethods(descriptor_->file(), options_) ? "" : "Lite";
1297 format.AddMap(vars);
1298 format(
1299 "class $classname$ : public "
1300 "::$proto_ns$::internal::MapEntry$lite$<$classname$, \n"
1301 " $key_cpp$, $val_cpp$,\n"
1302 " ::$proto_ns$::internal::WireFormatLite::$key_wire_type$,\n"
1303 " ::$proto_ns$::internal::WireFormatLite::$val_wire_type$> {\n"
1304 "public:\n"
1305 " typedef ::$proto_ns$::internal::MapEntry$lite$<$classname$, \n"
1306 " $key_cpp$, $val_cpp$,\n"
1307 " ::$proto_ns$::internal::WireFormatLite::$key_wire_type$,\n"
1308 " ::$proto_ns$::internal::WireFormatLite::$val_wire_type$> "
1309 "SuperType;\n"
1310 " $classname$();\n"
1311 " explicit PROTOBUF_CONSTEXPR $classname$(\n"
1312 " ::$proto_ns$::internal::ConstantInitialized);\n"
1313 " explicit $classname$(::$proto_ns$::Arena* arena);\n"
1314 " void MergeFrom(const $classname$& other);\n"
1315 " static const $classname$* internal_default_instance() { return "
1316 "reinterpret_cast<const "
1317 "$classname$*>(&_$classname$_default_instance_); }\n");
1318 auto utf8_check = GetUtf8CheckMode(descriptor_->field(0), options_);
1319 if (descriptor_->field(0)->type() == FieldDescriptor::TYPE_STRING &&
1320 utf8_check != Utf8CheckMode::kNone) {
1321 if (utf8_check == Utf8CheckMode::kStrict) {
1322 format(
1323 " static bool ValidateKey(std::string* s) {\n"
1324 " return ::$proto_ns$::internal::WireFormatLite::"
1325 "VerifyUtf8String(s->data(), static_cast<int>(s->size()), "
1326 "::$proto_ns$::internal::WireFormatLite::PARSE, \"$1$\");\n"
1327 " }\n",
1328 descriptor_->field(0)->full_name());
1329 } else {
1330 GOOGLE_CHECK(utf8_check == Utf8CheckMode::kVerify);
1331 format(
1332 " static bool ValidateKey(std::string* s) {\n"
1333 "#ifndef NDEBUG\n"
1334 " ::$proto_ns$::internal::WireFormatLite::VerifyUtf8String(\n"
1335 " s->data(), static_cast<int>(s->size()), "
1336 "::$proto_ns$::internal::"
1337 "WireFormatLite::PARSE, \"$1$\");\n"
1338 "#else\n"
1339 " (void) s;\n"
1340 "#endif\n"
1341 " return true;\n"
1342 " }\n",
1343 descriptor_->field(0)->full_name());
1344 }
1345 } else {
1346 format(" static bool ValidateKey(void*) { return true; }\n");
1347 }
1348 if (descriptor_->field(1)->type() == FieldDescriptor::TYPE_STRING &&
1349 utf8_check != Utf8CheckMode::kNone) {
1350 if (utf8_check == Utf8CheckMode::kStrict) {
1351 format(
1352 " static bool ValidateValue(std::string* s) {\n"
1353 " return ::$proto_ns$::internal::WireFormatLite::"
1354 "VerifyUtf8String(s->data(), static_cast<int>(s->size()), "
1355 "::$proto_ns$::internal::WireFormatLite::PARSE, \"$1$\");\n"
1356 " }\n",
1357 descriptor_->field(1)->full_name());
1358 } else {
1359 GOOGLE_CHECK(utf8_check == Utf8CheckMode::kVerify);
1360 format(
1361 " static bool ValidateValue(std::string* s) {\n"
1362 "#ifndef NDEBUG\n"
1363 " ::$proto_ns$::internal::WireFormatLite::VerifyUtf8String(\n"
1364 " s->data(), static_cast<int>(s->size()), "
1365 "::$proto_ns$::internal::"
1366 "WireFormatLite::PARSE, \"$1$\");\n"
1367 "#else\n"
1368 " (void) s;\n"
1369 "#endif\n"
1370 " return true;\n"
1371 " }\n",
1372 descriptor_->field(1)->full_name());
1373 }
1374 } else {
1375 format(" static bool ValidateValue(void*) { return true; }\n");
1376 }
1377 if (HasDescriptorMethods(descriptor_->file(), options_)) {
1378 format(
1379 " using ::$proto_ns$::Message::MergeFrom;\n"
1380 ""
1381 " ::$proto_ns$::Metadata GetMetadata() const final;\n");
1382 }
1383 format(
1384 " friend struct ::$tablename$;\n"
1385 "};\n");
1386 return;
1387 }
1388
1389 format(
1390 "class $dllexport_decl $${1$$classname$$}$$ class_final$ :\n"
1391 " public $superclass$ /* @@protoc_insertion_point("
1392 "class_definition:$full_name$) */ {\n",
1393 descriptor_);
1394 format(" public:\n");
1395 format.Indent();
1396
1397 if (EnableMessageOwnedArena(descriptor_, options_)) {
1398 format(
1399 "inline $classname$() : $classname$("
1400 "::$proto_ns$::Arena::InternalCreateMessageOwnedArena(), true) {}\n");
1401 } else {
1402 format("inline $classname$() : $classname$(nullptr) {}\n");
1403 }
1404 if (!HasSimpleBaseClass(descriptor_, options_)) {
1405 format("~$classname$() override;\n");
1406 }
1407 format(
1408 "explicit PROTOBUF_CONSTEXPR "
1409 "$classname$(::$proto_ns$::internal::ConstantInitialized);\n"
1410 "\n"
1411 "$classname$(const $classname$& from);\n"
1412 "$classname$($classname$&& from) noexcept\n"
1413 " : $classname$() {\n"
1414 " *this = ::std::move(from);\n"
1415 "}\n"
1416 "\n"
1417 "inline $classname$& operator=(const $classname$& from) {\n"
1418 " CopyFrom(from);\n"
1419 " return *this;\n"
1420 "}\n"
1421 "inline $classname$& operator=($classname$&& from) noexcept {\n"
1422 " if (this == &from) return *this;\n"
1423 " if (GetOwningArena() == from.GetOwningArena()\n"
1424 "#ifdef PROTOBUF_FORCE_COPY_IN_MOVE\n"
1425 " && GetOwningArena() != nullptr\n"
1426 "#endif // !PROTOBUF_FORCE_COPY_IN_MOVE\n"
1427 " ) {\n"
1428 " InternalSwap(&from);\n"
1429 " } else {\n"
1430 " CopyFrom(from);\n"
1431 " }\n"
1432 " return *this;\n"
1433 "}\n"
1434 "\n");
1435
1436 if (PublicUnknownFieldsAccessors(descriptor_)) {
1437 format(
1438 "inline const $unknown_fields_type$& unknown_fields() const {\n"
1439 " return $unknown_fields$;\n"
1440 "}\n"
1441 "inline $unknown_fields_type$* mutable_unknown_fields() {\n"
1442 " return $mutable_unknown_fields$;\n"
1443 "}\n"
1444 "\n");
1445 }
1446
1447 // Only generate this member if it's not disabled.
1448 if (HasDescriptorMethods(descriptor_->file(), options_) &&
1449 !descriptor_->options().no_standard_descriptor_accessor()) {
1450 format(
1451 "static const ::$proto_ns$::Descriptor* descriptor() {\n"
1452 " return GetDescriptor();\n"
1453 "}\n");
1454 }
1455
1456 if (HasDescriptorMethods(descriptor_->file(), options_)) {
1457 // These shadow non-static methods of the same names in Message. We
1458 // redefine them here because calls directly on the generated class can be
1459 // statically analyzed -- we know what descriptor types are being requested.
1460 // It also avoids a vtable dispatch.
1461 //
1462 // We would eventually like to eliminate the methods in Message, and having
1463 // this separate also lets us track calls to the base class methods
1464 // separately.
1465 format(
1466 "static const ::$proto_ns$::Descriptor* GetDescriptor() {\n"
1467 " return default_instance().GetMetadata().descriptor;\n"
1468 "}\n"
1469 "static const ::$proto_ns$::Reflection* GetReflection() {\n"
1470 " return default_instance().GetMetadata().reflection;\n"
1471 "}\n");
1472 }
1473
1474 format(
1475 "static const $classname$& default_instance() {\n"
1476 " return *internal_default_instance();\n"
1477 "}\n");
1478
1479 // Generate enum values for every field in oneofs. One list is generated for
1480 // each oneof with an additional *_NOT_SET value.
1481 for (auto oneof : OneOfRange(descriptor_)) {
1482 format("enum $1$Case {\n", UnderscoresToCamelCase(oneof->name(), true));
1483 format.Indent();
1484 for (auto field : FieldRange(oneof)) {
1485 format("$1$ = $2$,\n", OneofCaseConstantName(field), // 1
1486 field->number()); // 2
1487 }
1488 format("$1$_NOT_SET = 0,\n", ToUpper(oneof->name()));
1489 format.Outdent();
1490 format(
1491 "};\n"
1492 "\n");
1493 }
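// Illustrative example (hypothetical .proto, not from this file): for a oneof
// `contact` with fields `email = 1` and `phone = 2`, the loop above would emit
// roughly:
//
//   enum ContactCase {
//     kEmail = 1,
//     kPhone = 2,
//     CONTACT_NOT_SET = 0,
//   };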
1494
1495 // TODO(gerbens) make this private, while still granting other protos access.
1496 format(
1497 "static inline const $classname$* internal_default_instance() {\n"
1498 " return reinterpret_cast<const $classname$*>(\n"
1499 " &_$classname$_default_instance_);\n"
1500 "}\n"
1501 "static constexpr int kIndexInFileMessages =\n"
1502 " $1$;\n"
1503 "\n",
1504 index_in_file_messages_);
1505
1506 if (IsAnyMessage(descriptor_, options_)) {
1507 format(
1508 "// implements Any -----------------------------------------------\n"
1509 "\n");
1510 if (HasDescriptorMethods(descriptor_->file(), options_)) {
1511 format(
1512 "bool PackFrom(const ::$proto_ns$::Message& message) {\n"
1513 " return $any_metadata$.PackFrom(GetArena(), message);\n"
1514 "}\n"
1515 "bool PackFrom(const ::$proto_ns$::Message& message,\n"
1516 " ::PROTOBUF_NAMESPACE_ID::ConstStringParam "
1517 "type_url_prefix) {\n"
1518 " return $any_metadata$.PackFrom(GetArena(), message, "
1519 "type_url_prefix);\n"
1520 "}\n"
1521 "bool UnpackTo(::$proto_ns$::Message* message) const {\n"
1522 " return $any_metadata$.UnpackTo(message);\n"
1523 "}\n"
1524 "static bool GetAnyFieldDescriptors(\n"
1525 " const ::$proto_ns$::Message& message,\n"
1526 " const ::$proto_ns$::FieldDescriptor** type_url_field,\n"
1527 " const ::$proto_ns$::FieldDescriptor** value_field);\n"
1528 "template <typename T, class = typename std::enable_if<"
1529 "!std::is_convertible<T, const ::$proto_ns$::Message&>"
1530 "::value>::type>\n"
1531 "bool PackFrom(const T& message) {\n"
1532 " return $any_metadata$.PackFrom<T>(GetArena(), message);\n"
1533 "}\n"
1534 "template <typename T, class = typename std::enable_if<"
1535 "!std::is_convertible<T, const ::$proto_ns$::Message&>"
1536 "::value>::type>\n"
1537 "bool PackFrom(const T& message,\n"
1538 " ::PROTOBUF_NAMESPACE_ID::ConstStringParam "
1539 "type_url_prefix) {\n"
1540 " return $any_metadata$.PackFrom<T>(GetArena(), message, "
1541 "type_url_prefix);"
1542 "}\n"
1543 "template <typename T, class = typename std::enable_if<"
1544 "!std::is_convertible<T, const ::$proto_ns$::Message&>"
1545 "::value>::type>\n"
1546 "bool UnpackTo(T* message) const {\n"
1547 " return $any_metadata$.UnpackTo<T>(message);\n"
1548 "}\n");
1549 } else {
1550 format(
1551 "template <typename T>\n"
1552 "bool PackFrom(const T& message) {\n"
1553 " return $any_metadata$.PackFrom(GetArena(), message);\n"
1554 "}\n"
1555 "template <typename T>\n"
1556 "bool PackFrom(const T& message,\n"
1557 " ::PROTOBUF_NAMESPACE_ID::ConstStringParam "
1558 "type_url_prefix) {\n"
1559 " return $any_metadata$.PackFrom(GetArena(), message, "
1560 "type_url_prefix);\n"
1561 "}\n"
1562 "template <typename T>\n"
1563 "bool UnpackTo(T* message) const {\n"
1564 " return $any_metadata$.UnpackTo(message);\n"
1565 "}\n");
1566 }
1567 format(
1568 "template<typename T> bool Is() const {\n"
1569 " return $any_metadata$.Is<T>();\n"
1570 "}\n"
1571 "static bool ParseAnyTypeUrl(::PROTOBUF_NAMESPACE_ID::ConstStringParam "
1572 "type_url,\n"
1573 " std::string* full_type_name);\n");
1574 }
1575
1576 format(
1577 "friend void swap($classname$& a, $classname$& b) {\n"
1578 " a.Swap(&b);\n"
1579 "}\n"
1580 "PROTOBUF_NOINLINE void Swap($classname$* other) {\n"
1581 " if (other == this) return;\n"
1582 "#ifdef PROTOBUF_FORCE_COPY_IN_SWAP\n"
1583 " if (GetOwningArena() != nullptr &&\n"
1584 " GetOwningArena() == other->GetOwningArena()) {\n "
1585 "#else // PROTOBUF_FORCE_COPY_IN_SWAP\n"
1586 " if (GetOwningArena() == other->GetOwningArena()) {\n"
1587 "#endif // !PROTOBUF_FORCE_COPY_IN_SWAP\n"
1588 " InternalSwap(other);\n"
1589 " } else {\n"
1590 " ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);\n"
1591 " }\n"
1592 "}\n"
1593 "void UnsafeArenaSwap($classname$* other) {\n"
1594 " if (other == this) return;\n"
1595 " $DCHK$(GetOwningArena() == other->GetOwningArena());\n"
1596 " InternalSwap(other);\n"
1597 "}\n");
1598
1599 format(
1600 "\n"
1601 "// implements Message ----------------------------------------------\n"
1602 "\n"
1603 "$classname$* New(::$proto_ns$::Arena* arena = nullptr) const final {\n"
1604 " return CreateMaybeMessage<$classname$>(arena);\n"
1605 "}\n");
1606
1607 // For instances that derive from Message (rather than MessageLite), some
1608 // methods are virtual and should be marked as final.
1609 format.Set("full_final", HasDescriptorMethods(descriptor_->file(), options_)
1610 ? "final"
1611 : "");
1612
1613 if (HasGeneratedMethods(descriptor_->file(), options_)) {
1614 if (HasDescriptorMethods(descriptor_->file(), options_)) {
1615 if (!HasSimpleBaseClass(descriptor_, options_)) {
1616 format(
1617 // Use Message's built-in MergeFrom and CopyFrom when the passed-in
1618 // argument is a generic Message instance, and only define the
1619 // custom MergeFrom and CopyFrom instances when the source of the
1620 // merge/copy is known to be the same class as the destination.
1621 // TODO(jorg): Define MergeFrom in terms of MergeImpl, rather than
1622 // the other way around, to save even more code size.
1623 "using $superclass$::CopyFrom;\n"
1624 "void CopyFrom(const $classname$& from);\n"
1625 ""
1626 "using $superclass$::MergeFrom;\n"
1627 "void MergeFrom(const $classname$& from);\n"
1628 "private:\n"
1629 "static void MergeImpl(::$proto_ns$::Message* to, const "
1630 "::$proto_ns$::Message& from);\n"
1631 "public:\n");
1632 } else {
1633 format(
1634 "using $superclass$::CopyFrom;\n"
1635 "inline void CopyFrom(const $classname$& from) {\n"
1636 " $superclass$::CopyImpl(this, from);\n"
1637 "}\n"
1638 ""
1639 "using $superclass$::MergeFrom;\n"
1640 "void MergeFrom(const $classname$& from) {\n"
1641 " $superclass$::MergeImpl(this, from);\n"
1642 "}\n"
1643 "public:\n");
1644 }
1645 } else {
1646 format(
1647 "void CheckTypeAndMergeFrom(const ::$proto_ns$::MessageLite& from)"
1648 " final;\n"
1649 "void CopyFrom(const $classname$& from);\n"
1650 "void MergeFrom(const $classname$& from);\n");
1651 }
1652
1653 if (!HasSimpleBaseClass(descriptor_, options_)) {
1654 format(
1655 "PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;\n"
1656 "bool IsInitialized() const final;\n"
1657 "\n"
1658 "size_t ByteSizeLong() const final;\n");
1659
1660 parse_function_generator_->GenerateMethodDecls(printer);
1661
1662 format(
1663 "$uint8$* _InternalSerialize(\n"
1664 " $uint8$* target, ::$proto_ns$::io::EpsCopyOutputStream* stream) "
1665 "const final;\n");
1666 }
1667 }
1668
1669 if (options_.field_listener_options.inject_field_listener_events) {
1670 format("static constexpr int _kInternalFieldNumber = $1$;\n",
1671 descriptor_->field_count());
1672 }
1673
1674 if (!HasSimpleBaseClass(descriptor_, options_)) {
1675 format(
1676 "int GetCachedSize() const final { return "
1677 "$cached_size$.Get(); }"
1678 "\n\nprivate:\n"
1679 "void SharedCtor();\n"
1680 "void SharedDtor();\n"
1681 "void SetCachedSize(int size) const$ full_final$;\n"
1682 "void InternalSwap($classname$* other);\n");
1683 }
1684
1685 format(
1686 // Friend AnyMetadata so that it can call this FullMessageName() method.
1687 "\nprivate:\n"
1688 "friend class ::$proto_ns$::internal::AnyMetadata;\n"
1689 "static $1$ FullMessageName() {\n"
1690 " return \"$full_name$\";\n"
1691 "}\n",
1692 options_.opensource_runtime ? "::PROTOBUF_NAMESPACE_ID::StringPiece"
1693 : "::StringPiece");
1694
1695 format(
1696 // TODO(gerbens) Make this private! Currently people are deriving from
1697 // protos to give access to this constructor, breaking the invariants
1698 // we rely on.
1699 "protected:\n"
1700 "explicit $classname$(::$proto_ns$::Arena* arena,\n"
1701 " bool is_message_owned = false);\n");
1702
1703 switch (NeedsArenaDestructor()) {
1704 case ArenaDtorNeeds::kOnDemand:
1705 format(
1706 "private:\n"
1707 "static void ArenaDtor(void* object);\n"
1708 "inline void OnDemandRegisterArenaDtor(::$proto_ns$::Arena* arena) "
1709 "override {\n"
1710 " if (arena == nullptr || ($inlined_string_donated_array$[0] & "
1711 "0x1u) "
1712 "== "
1713 "0) {\n"
1714 " return;\n"
1715 " }\n"
1716 " $inlined_string_donated_array$[0] &= 0xFFFFFFFEu;\n"
1717 " arena->OwnCustomDestructor(this, &$classname$::ArenaDtor);\n"
1718 "}\n");
1719 break;
1720 case ArenaDtorNeeds::kRequired:
1721 format(
1722 "private:\n"
1723 "static void ArenaDtor(void* object);\n");
1724 break;
1725 case ArenaDtorNeeds::kNone:
1726 break;
1727 }
1728
1729 format(
1730 "public:\n"
1731 "\n");
1732
1733 if (HasDescriptorMethods(descriptor_->file(), options_)) {
1734 if (HasGeneratedMethods(descriptor_->file(), options_)) {
1735 format(
1736 "static const ClassData _class_data_;\n"
1737 "const ::$proto_ns$::Message::ClassData*"
1738 "GetClassData() const final;\n"
1739 "\n");
1740 }
1741 format(
1742 "::$proto_ns$::Metadata GetMetadata() const final;\n"
1743 "\n");
1744 } else {
1745 format(
1746 "std::string GetTypeName() const final;\n"
1747 "\n");
1748 }
1749
1750 format(
1751 "// nested types ----------------------------------------------------\n"
1752 "\n");
1753
1754 // Import all nested message classes into this class's scope with typedefs.
1755 for (int i = 0; i < descriptor_->nested_type_count(); i++) {
1756 const Descriptor* nested_type = descriptor_->nested_type(i);
1757 if (!IsMapEntryMessage(nested_type)) {
1758 format.Set("nested_full_name", ClassName(nested_type, false));
1759 format.Set("nested_name", ResolveKeyword(nested_type->name()));
1760 format("typedef ${1$$nested_full_name$$}$ ${1$$nested_name$$}$;\n",
1761 nested_type);
1762 }
1763 }
1764
1765 if (descriptor_->nested_type_count() > 0) {
1766 format("\n");
1767 }
1768
1769 // Import all nested enums and their values into this class's scope with
1770 // typedefs and constants.
1771 for (int i = 0; i < descriptor_->enum_type_count(); i++) {
1772 enum_generators_[i]->GenerateSymbolImports(printer);
1773 format("\n");
1774 }
1775
1776 format(
1777 "// accessors -------------------------------------------------------\n"
1778 "\n");
1779
1780 // Generate accessor methods for all fields.
1781 GenerateFieldAccessorDeclarations(printer);
1782
1783 // Declare extension identifiers.
1784 for (int i = 0; i < descriptor_->extension_count(); i++) {
1785 extension_generators_[i]->GenerateDeclaration(printer);
1786 }
1787
1788
1789 format("// @@protoc_insertion_point(class_scope:$full_name$)\n");
1790
1791 // Generate private members.
1792 format.Outdent();
1793 format(" private:\n");
1794 format.Indent();
1795 // TODO(seongkim): Remove hack to track field access and remove this class.
1796 format("class _Internal;\n");
1797
1798 for (auto field : FieldRange(descriptor_)) {
1799 // set_has_***() generated in all oneofs.
1800 if (!field->is_repeated() && !field->options().weak() &&
1801 field->real_containing_oneof()) {
1802 format("void set_has_$1$();\n", FieldName(field));
1803 }
1804 }
1805 format("\n");
1806
1807 // Generate oneof function declarations
1808 for (auto oneof : OneOfRange(descriptor_)) {
1809 format(
1810 "inline bool has_$1$() const;\n"
1811 "inline void clear_has_$1$();\n\n",
1812 oneof->name());
1813 }
1814
1815 if (HasGeneratedMethods(descriptor_->file(), options_) &&
1816 !descriptor_->options().message_set_wire_format() &&
1817 num_required_fields_ > 1) {
1818 format(
1819 "// helper for ByteSizeLong()\n"
1820 "size_t RequiredFieldsByteSizeFallback() const;\n\n");
1821 }
1822
1823 if (HasGeneratedMethods(descriptor_->file(), options_)) {
1824 parse_function_generator_->GenerateDataDecls(printer);
1825 }
1826
1827 // Prepare decls for _cached_size_ and _has_bits_. Their position in the
1828 // output will be determined later.
1829
1830 bool need_to_emit_cached_size = !HasSimpleBaseClass(descriptor_, options_);
1831 const std::string cached_size_decl =
1832 "mutable ::$proto_ns$::internal::CachedSize _cached_size_;\n";
1833
1834 const size_t sizeof_has_bits = HasBitsSize();
1835 const std::string has_bits_decl =
1836 sizeof_has_bits == 0 ? ""
1837 : StrCat("::$proto_ns$::internal::HasBits<",
1838 sizeof_has_bits, "> _has_bits_;\n");
1839
1840 // To minimize padding, data members are divided into three sections:
1841 // (1) members assumed to align to 8 bytes
1842 // (2) members corresponding to message fields, re-ordered to optimize
1843 // alignment.
1844 // (3) members assumed to align to 4 bytes.
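// A minimal sketch of a resulting private section for a hypothetical message
// (member names and types are illustrative, not generated verbatim):
//
//   ::PROTOBUF_NAMESPACE_ID::internal::ExtensionSet _extensions_;        // (1)
//   ::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_;            // (1)
//   mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_; // (1)
//   ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;             // (2)
//   int64_t id_;                                                         // (2)
//   uint32_t _oneof_case_[1];                                            // (3)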
1845
1846 // Members assumed to align to 8 bytes:
1847
1848 if (descriptor_->extension_range_count() > 0) {
1849 format(
1850 "::$proto_ns$::internal::ExtensionSet _extensions_;\n"
1851 "\n");
1852 }
1853
1854 if (options_.field_listener_options.inject_field_listener_events &&
1855 descriptor_->file()->options().optimize_for() !=
1856 google::protobuf::FileOptions::LITE_RUNTIME) {
1857 format("static ::$proto_ns$::AccessListener<$1$> _tracker_;\n",
1858 ClassName(descriptor_));
1859 }
1860
1861 // Generate _inlined_string_donated_ for inlined string type.
1862 // TODO(congliu): To avoid affecting the locality of `_has_bits_`, should this
1863 // be below or above `_has_bits_`?
1864 if (!inlined_string_indices_.empty()) {
1865 format("::$proto_ns$::internal::HasBits<$1$> _inlined_string_donated_;\n",
1866 InlinedStringDonatedSize());
1867 }
1868
1869 format(
1870 "template <typename T> friend class "
1871 "::$proto_ns$::Arena::InternalHelper;\n"
1872 "typedef void InternalArenaConstructable_;\n"
1873 "typedef void DestructorSkippable_;\n");
1874
1875 if (!has_bit_indices_.empty()) {
1876 // _has_bits_ is frequently accessed, so to reduce code size and improve
1877 // speed, it should be close to the start of the object. Placing
1878 // _cached_size_ together with _has_bits_ improves cache locality despite
1879 // potential alignment padding.
1880 format(has_bits_decl.c_str());
1881 if (need_to_emit_cached_size) {
1882 format(cached_size_decl.c_str());
1883 need_to_emit_cached_size = false;
1884 }
1885 }
1886
1887 // Field members:
1888
1889 // Emit some private and static members
1890 for (auto field : optimized_order_) {
1891 const FieldGenerator& generator = field_generators_.get(field);
1892 generator.GenerateStaticMembers(printer);
1893 generator.GeneratePrivateMembers(printer);
1894 }
1895
1896 // For each oneof generate a union
1897 for (auto oneof : OneOfRange(descriptor_)) {
1898 std::string camel_oneof_name = UnderscoresToCamelCase(oneof->name(), true);
1899 format("union $1$Union {\n", camel_oneof_name);
1900 format.Indent();
1901 format(
1902 // explicit empty constructor is needed when union contains
1903 // ArenaStringPtr members for string fields.
1904 "constexpr $1$Union() : _constinit_{} {}\n"
1905 " ::$proto_ns$::internal::ConstantInitialized _constinit_;\n",
1906 camel_oneof_name);
1907 for (auto field : FieldRange(oneof)) {
1908 if (!IsFieldStripped(field, options_)) {
1909 field_generators_.get(field).GeneratePrivateMembers(printer);
1910 }
1911 }
1912 format.Outdent();
1913 format("} $1$_;\n", oneof->name());
1914 for (auto field : FieldRange(oneof)) {
1915 if (!IsFieldStripped(field, options_)) {
1916 field_generators_.get(field).GenerateStaticMembers(printer);
1917 }
1918 }
1919 }
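// Illustrative output (hypothetical oneof `contact` with a string field
// `email` and an int32 field `phone`; the member types come from the field
// generators, so they are assumptions here):
//
//   union ContactUnion {
//     constexpr ContactUnion() : _constinit_{} {}
//     ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
//     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr email_;
//     int32_t phone_;
//   } contact_;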
1920
1921 // Members assumed to align to 4 bytes:
1922
1923 if (need_to_emit_cached_size) {
1924 format(cached_size_decl.c_str());
1925 need_to_emit_cached_size = false;
1926 }
1927
1928 // Generate _oneof_case_.
1929 if (descriptor_->real_oneof_decl_count() > 0) {
1930 format(
1931 "$uint32$ _oneof_case_[$1$];\n"
1932 "\n",
1933 descriptor_->real_oneof_decl_count());
1934 }
1935
1936 if (num_weak_fields_) {
1937 format("::$proto_ns$::internal::WeakFieldMap _weak_field_map_;\n");
1938 }
1939 // Generate _any_metadata_ for the Any type.
1940 if (IsAnyMessage(descriptor_, options_)) {
1941 format("::$proto_ns$::internal::AnyMetadata _any_metadata_;\n");
1942 }
1943
1944 // The TableStruct struct needs access to the private parts, in order to
1945 // construct the offsets of all members.
1946 format("friend struct ::$tablename$;\n");
1947
1948 format.Outdent();
1949 format("};");
1950 GOOGLE_DCHECK(!need_to_emit_cached_size);
1951 } // NOLINT(readability/fn_size)
1952
1953 void MessageGenerator::GenerateInlineMethods(io::Printer* printer) {
1954 if (IsMapEntryMessage(descriptor_)) return;
1955 GenerateFieldAccessorDefinitions(printer);
1956
1957 // Generate oneof_case() functions.
1958 for (auto oneof : OneOfRange(descriptor_)) {
1959 Formatter format(printer, variables_);
1960 format.Set("camel_oneof_name", UnderscoresToCamelCase(oneof->name(), true));
1961 format.Set("oneof_name", oneof->name());
1962 format.Set("oneof_index", oneof->index());
1963 format(
1964 "inline $classname$::$camel_oneof_name$Case $classname$::"
1965 "${1$$oneof_name$_case$}$() const {\n"
1966 " return $classname$::$camel_oneof_name$Case("
1967 "$oneof_case$[$oneof_index$]);\n"
1968 "}\n",
1969 oneof);
1970 }
1971 }
1972
1973 void MessageGenerator::GenerateSchema(io::Printer* printer, int offset,
1974 int has_offset) {
1975 Formatter format(printer, variables_);
1976 has_offset = !has_bit_indices_.empty() || IsMapEntryMessage(descriptor_)
1977 ? offset + has_offset
1978 : -1;
1979 int inlined_string_indices_offset;
1980 if (inlined_string_indices_.empty()) {
1981 inlined_string_indices_offset = -1;
1982 } else {
1983 GOOGLE_DCHECK_NE(has_offset, -1);
1984 GOOGLE_DCHECK(!IsMapEntryMessage(descriptor_));
1985 inlined_string_indices_offset = has_offset + has_bit_indices_.size();
1986 }
1987
1988 format("{ $1$, $2$, $3$, sizeof($classtype$)},\n", offset, has_offset,
1989 inlined_string_indices_offset);
1990 }
1991
1992 void MessageGenerator::GenerateClassMethods(io::Printer* printer) {
1993 Formatter format(printer, variables_);
1994 if (IsMapEntryMessage(descriptor_)) {
1995 format(
1996 "$classname$::$classname$() {}\n"
1997 "$classname$::$classname$(::$proto_ns$::Arena* arena)\n"
1998 " : SuperType(arena) {}\n"
1999 "void $classname$::MergeFrom(const $classname$& other) {\n"
2000 " MergeFromInternal(other);\n"
2001 "}\n");
2002 if (HasDescriptorMethods(descriptor_->file(), options_)) {
2003 if (!descriptor_->options().map_entry()) {
2004 format(
2005 "::$proto_ns$::Metadata $classname$::GetMetadata() const {\n"
2006 "$annotate_reflection$"
2007 " return ::_pbi::AssignDescriptors(\n"
2008 " &$desc_table$_getter, &$desc_table$_once,\n"
2009 " $file_level_metadata$[$1$]);\n"
2010 "}\n",
2011 index_in_file_messages_);
2012 } else {
2013 format(
2014 "::$proto_ns$::Metadata $classname$::GetMetadata() const {\n"
2015 " return ::_pbi::AssignDescriptors(\n"
2016 " &$desc_table$_getter, &$desc_table$_once,\n"
2017 " $file_level_metadata$[$1$]);\n"
2018 "}\n",
2019 index_in_file_messages_);
2020 }
2021 }
2022 return;
2023 }
2024
2025 if (IsAnyMessage(descriptor_, options_)) {
2026 if (HasDescriptorMethods(descriptor_->file(), options_)) {
2027 format(
2028 "bool $classname$::GetAnyFieldDescriptors(\n"
2029 " const ::$proto_ns$::Message& message,\n"
2030 " const ::$proto_ns$::FieldDescriptor** type_url_field,\n"
2031 " const ::$proto_ns$::FieldDescriptor** value_field) {\n"
2032 " return ::_pbi::GetAnyFieldDescriptors(\n"
2033 " message, type_url_field, value_field);\n"
2034 "}\n");
2035 }
2036 format(
2037 "bool $classname$::ParseAnyTypeUrl(\n"
2038 " ::PROTOBUF_NAMESPACE_ID::ConstStringParam type_url,\n"
2039 " std::string* full_type_name) {\n"
2040 " return ::_pbi::ParseAnyTypeUrl(type_url, full_type_name);\n"
2041 "}\n"
2042 "\n");
2043 }
2044
2045 format(
2046 "class $classname$::_Internal {\n"
2047 " public:\n");
2048 format.Indent();
2049 if (!has_bit_indices_.empty()) {
2050 format(
2051 "using HasBits = "
2052 "decltype(std::declval<$classname$>().$has_bits$);\n");
2053 }
2054 for (auto field : FieldRange(descriptor_)) {
2055 field_generators_.get(field).GenerateInternalAccessorDeclarations(printer);
2056 if (IsFieldStripped(field, options_)) {
2057 continue;
2058 }
2059 if (HasHasbit(field)) {
2060 int has_bit_index = HasBitIndex(field);
2061 GOOGLE_CHECK_NE(has_bit_index, kNoHasbit) << field->full_name();
2062 format(
2063 "static void set_has_$1$(HasBits* has_bits) {\n"
2064 " (*has_bits)[$2$] |= $3$u;\n"
2065 "}\n",
2066 FieldName(field), has_bit_index / 32, (1u << (has_bit_index % 32)));
2067 }
2068 }
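// Worked example (illustrative): a field `bar` with has_bit_index == 37 maps
// to word 37 / 32 == 1 and mask 1u << (37 % 32) == 32u, so the emitted helper
// would look roughly like:
//
//   static void set_has_bar(HasBits* has_bits) {
//     (*has_bits)[1] |= 32u;
//   }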
2069 if (num_required_fields_ > 0) {
2070 const std::vector<uint32_t> masks_for_has_bits = RequiredFieldsBitMask();
2071 format(
2072 "static bool MissingRequiredFields(const HasBits& has_bits) "
2073 "{\n"
2074 " return $1$;\n"
2075 "}\n",
2076 ConditionalToCheckBitmasks(masks_for_has_bits, false, "has_bits"));
2077 }
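// Illustrative example (hypothetical message): if the required fields occupy
// has-bit indices 0 and 2 of word 0, RequiredFieldsBitMask() yields a mask of
// 0x00000005 and the generated MissingRequiredFields() effectively checks
//
//   (has_bits[0] & 0x00000005u) != 0x00000005u
//
// i.e. it is true when at least one required field is unset. The exact
// expression is produced by ConditionalToCheckBitmasks() with
// return_success == false.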
2078
2079 format.Outdent();
2080 format("};\n\n");
2081 for (auto field : FieldRange(descriptor_)) {
2082 if (!IsFieldStripped(field, options_)) {
2083 field_generators_.get(field).GenerateInternalAccessorDefinitions(printer);
2084 }
2085 }
2086
2087 // Generate non-inline field definitions.
2088 for (auto field : FieldRange(descriptor_)) {
2089 if (IsFieldStripped(field, options_)) {
2090 continue;
2091 }
2092 field_generators_.get(field).GenerateNonInlineAccessorDefinitions(printer);
2093 if (IsCrossFileMaybeMap(field)) {
2094 Formatter::SaveState saver(&format);
2095 std::map<std::string, std::string> vars;
2096 SetCommonFieldVariables(field, &vars, options_);
2097 if (field->real_containing_oneof()) {
2098 SetCommonOneofFieldVariables(field, &vars);
2099 }
2100 format.AddMap(vars);
2101 GenerateFieldClear(field, false, format);
2102 }
2103 }
2104
2105 GenerateStructors(printer);
2106 format("\n");
2107
2108 if (descriptor_->real_oneof_decl_count() > 0) {
2109 GenerateOneofClear(printer);
2110 format("\n");
2111 }
2112
2113 if (HasGeneratedMethods(descriptor_->file(), options_)) {
2114 GenerateClear(printer);
2115 format("\n");
2116
2117 if (!HasSimpleBaseClass(descriptor_, options_)) {
2118 parse_function_generator_->GenerateMethodImpls(printer);
2119 format("\n");
2120
2121 parse_function_generator_->GenerateDataDefinitions(printer);
2122 }
2123
2124 GenerateSerializeWithCachedSizesToArray(printer);
2125 format("\n");
2126
2127 GenerateByteSize(printer);
2128 format("\n");
2129
2130 GenerateMergeFrom(printer);
2131 format("\n");
2132
2133 GenerateClassSpecificMergeFrom(printer);
2134 format("\n");
2135
2136 GenerateCopyFrom(printer);
2137 format("\n");
2138
2139 GenerateIsInitialized(printer);
2140 format("\n");
2141 }
2142
2143 GenerateVerify(printer);
2144
2145 GenerateSwap(printer);
2146 format("\n");
2147
2148 if (HasDescriptorMethods(descriptor_->file(), options_)) {
2149 if (!descriptor_->options().map_entry()) {
2150 format(
2151 "::$proto_ns$::Metadata $classname$::GetMetadata() const {\n"
2152 "$annotate_reflection$"
2153 " return ::_pbi::AssignDescriptors(\n"
2154 " &$desc_table$_getter, &$desc_table$_once,\n"
2155 " $file_level_metadata$[$1$]);\n"
2156 "}\n",
2157 index_in_file_messages_);
2158 } else {
2159 format(
2160 "::$proto_ns$::Metadata $classname$::GetMetadata() const {\n"
2161 " return ::_pbi::AssignDescriptors(\n"
2162 " &$desc_table$_getter, &$desc_table$_once,\n"
2163 " $file_level_metadata$[$1$]);\n"
2164 "}\n",
2165 index_in_file_messages_);
2166 }
2167 } else {
2168 format(
2169 "std::string $classname$::GetTypeName() const {\n"
2170 " return \"$full_name$\";\n"
2171 "}\n"
2172 "\n");
2173 }
2174
2175 if (options_.field_listener_options.inject_field_listener_events &&
2176 descriptor_->file()->options().optimize_for() !=
2177 google::protobuf::FileOptions::LITE_RUNTIME) {
2178 format(
2179 "::$proto_ns$::AccessListener<$classtype$> "
2180 "$1$::$tracker$(&FullMessageName);\n",
2181 ClassName(descriptor_));
2182 }
2183 }
2184
2185 std::pair<size_t, size_t> MessageGenerator::GenerateOffsets(
2186 io::Printer* printer) {
2187 Formatter format(printer, variables_);
2188
2189 if (!has_bit_indices_.empty() || IsMapEntryMessage(descriptor_)) {
2190 format("PROTOBUF_FIELD_OFFSET($classtype$, $has_bits$),\n");
2191 } else {
2192 format("~0u, // no _has_bits_\n");
2193 }
2194 format("PROTOBUF_FIELD_OFFSET($classtype$, _internal_metadata_),\n");
2195 if (descriptor_->extension_range_count() > 0) {
2196 format("PROTOBUF_FIELD_OFFSET($classtype$, $extensions$),\n");
2197 } else {
2198 format("~0u, // no _extensions_\n");
2199 }
2200 if (descriptor_->real_oneof_decl_count() > 0) {
2201 format("PROTOBUF_FIELD_OFFSET($classtype$, $oneof_case$[0]),\n");
2202 } else {
2203 format("~0u, // no _oneof_case_\n");
2204 }
2205 if (num_weak_fields_ > 0) {
2206 format("PROTOBUF_FIELD_OFFSET($classtype$, $weak_field_map$),\n");
2207 } else {
2208 format("~0u, // no _weak_field_map_\n");
2209 }
2210 if (!inlined_string_indices_.empty()) {
2211 format(
2212 "PROTOBUF_FIELD_OFFSET($classtype$, "
2213 "$inlined_string_donated_array$),\n");
2214 } else {
2215 format("~0u, // no _inlined_string_donated_\n");
2216 }
2217 const int kNumGenericOffsets = 6; // the number of fixed offsets above
2218 const size_t offsets = kNumGenericOffsets + descriptor_->field_count() +
2219 descriptor_->real_oneof_decl_count();
2220 size_t entries = offsets;
2221 for (auto field : FieldRange(descriptor_)) {
2222 if (IsFieldStripped(field, options_)) {
2223 format("~0u, // stripped\n");
2224 continue;
2225 }
2226 // TODO(sbenza): We should not have an entry in the offset table for fields
2227 // that do not use it.
2228 if (field->options().weak() || field->real_containing_oneof()) {
2229 // Mark the field to prevent unintentional access through reflection.
2230 // Don't use the top bit because that is for unused fields.
2231 format("::_pbi::kInvalidFieldOffsetTag");
2232 } else {
2233 format("PROTOBUF_FIELD_OFFSET($classtype$, $1$)", FieldMemberName(field));
2234 }
2235
2236 // Some information about a field is in the pdproto profile. The profile is
2237 // only available at compile time. So we embed such information in the
2238 // offset of the field, so that the information is available when
2239 // reflectively accessing the field at run time.
2240 //
2241 // Embed whether the field is an eagerly verified lazy field or an inlined
2242 // string in the LSB of the offset.
2243 if (IsEagerlyVerifiedLazy(field, options_, scc_analyzer_)) {
2244 format(" | 0x1u // eagerly verified lazy\n");
2245 } else if (IsStringInlined(field, options_)) {
2246 format(" | 0x1u // inlined\n");
2247 }
2248 format(",\n");
2249 }
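// Worked example (illustrative; `Foo` and `name_` are hypothetical): an
// inlined string field at byte offset 24 is emitted as
// "PROTOBUF_FIELD_OFFSET(Foo, name_) | 0x1u", i.e. 25. Reflection masks off
// the low bit to recover the real offset and reads the bit itself as the
// "inlined" / "eagerly verified lazy" flag; this is safe because the tagged
// members are at least 2-byte aligned, so the low bit of the true offset is
// always zero.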
2250
2251 int count = 0;
2252 for (auto oneof : OneOfRange(descriptor_)) {
2253 format("PROTOBUF_FIELD_OFFSET($classtype$, $1$_),\n", oneof->name());
2254 count++;
2255 }
2256 GOOGLE_CHECK_EQ(count, descriptor_->real_oneof_decl_count());
2257
2258 if (IsMapEntryMessage(descriptor_)) {
2259 entries += 2;
2260 format(
2261 "0,\n"
2262 "1,\n");
2263 } else if (!has_bit_indices_.empty()) {
2264 entries += has_bit_indices_.size();
2265 for (int i = 0; i < has_bit_indices_.size(); i++) {
2266 const std::string index =
2267 has_bit_indices_[i] >= 0 ? StrCat(has_bit_indices_[i]) : "~0u";
2268 format("$1$,\n", index);
2269 }
2270 }
2271 if (!inlined_string_indices_.empty()) {
2272 entries += inlined_string_indices_.size();
2273 for (int inlined_string_index : inlined_string_indices_) {
2274 const std::string index =
2275 inlined_string_index >= 0
2276 ? StrCat(inlined_string_index, ", // inlined_string_index")
2277 : "~0u,";
2278 format("$1$\n", index);
2279 }
2280 }
2281
2282 return std::make_pair(entries, offsets);
2283 }
2284
2285 void MessageGenerator::GenerateSharedConstructorCode(io::Printer* printer) {
2286 if (HasSimpleBaseClass(descriptor_, options_)) return;
2287 Formatter format(printer, variables_);
2288
2289 format("inline void $classname$::SharedCtor() {\n");
2290
2291 std::vector<bool> processed(optimized_order_.size(), false);
2292 GenerateConstructorBody(printer, processed, false);
2293
2294 for (auto oneof : OneOfRange(descriptor_)) {
2295 format("clear_has_$1$();\n", oneof->name());
2296 }
2297
2298 format("}\n\n");
2299 }
2300
2301 void MessageGenerator::GenerateSharedDestructorCode(io::Printer* printer) {
2302 if (HasSimpleBaseClass(descriptor_, options_)) return;
2303 Formatter format(printer, variables_);
2304
2305 format("inline void $classname$::SharedDtor() {\n");
2306 format.Indent();
2307 format("$DCHK$(GetArenaForAllocation() == nullptr);\n");
2308 // Write the destructors for each field except oneof members.
2309 // optimized_order_ does not contain oneof fields.
2310 for (auto field : optimized_order_) {
2311 field_generators_.get(field).GenerateDestructorCode(printer);
2312 }
2313
2314 // Generate code to destruct oneofs. Clearing should do the work.
2315 for (auto oneof : OneOfRange(descriptor_)) {
2316 format(
2317 "if (has_$1$()) {\n"
2318 " clear_$1$();\n"
2319 "}\n",
2320 oneof->name());
2321 }
2322
2323 if (num_weak_fields_) {
2324 format("$weak_field_map$.ClearAll();\n");
2325 }
2326 format.Outdent();
2327 format(
2328 "}\n"
2329 "\n");
2330 }
2331
2332 ArenaDtorNeeds MessageGenerator::NeedsArenaDestructor() const {
2333 if (HasSimpleBaseClass(descriptor_, options_)) return ArenaDtorNeeds::kNone;
2334 ArenaDtorNeeds needs = ArenaDtorNeeds::kNone;
2335 for (const auto* field : FieldRange(descriptor_)) {
2336 if (IsFieldStripped(field, options_)) continue;
2337 needs =
2338 std::max(needs, field_generators_.get(field).NeedsArenaDestructor());
2339 }
2340 return needs;
2341 }
2342
2343 void MessageGenerator::GenerateArenaDestructorCode(io::Printer* printer) {
2344 GOOGLE_CHECK(NeedsArenaDestructor() > ArenaDtorNeeds::kNone);
2345
2346 Formatter format(printer, variables_);
2347
2348 // Generate the ArenaDtor() method. Track whether any fields actually produced
2349 // code that needs to be called.
2350 format("void $classname$::ArenaDtor(void* object) {\n");
2351 format.Indent();
2352
2353 // This code is placed inside a static method, rather than an ordinary one,
2354 // since that simplifies Arena's destructor list (ordinary function pointers
2355 // rather than member function pointers). _this is the object being
2356 // destructed.
2357 format("$classname$* _this = reinterpret_cast< $classname$* >(object);\n");
2358
2359 // Process non-oneof fields first.
2360 for (auto field : optimized_order_) {
2361 if (IsFieldStripped(field, options_)) continue;
2362 const FieldGenerator& fg = field_generators_.get(field);
2363 fg.GenerateArenaDestructorCode(printer);
2364 }
2365
2366 // Process oneof fields.
2367 for (auto oneof : OneOfRange(descriptor_)) {
2368 for (auto field : FieldRange(oneof)) {
2369 if (IsFieldStripped(field, options_)) continue;
2370 field_generators_.get(field).GenerateArenaDestructorCode(printer);
2371 }
2372 }
2373
2374 format.Outdent();
2375 format("}\n");
2376 }
2377
2378 void MessageGenerator::GenerateConstexprConstructor(io::Printer* printer) {
2379 Formatter format(printer, variables_);
2380
2381 format(
2382 "PROTOBUF_CONSTEXPR $classname$::$classname$(\n"
2383 " ::_pbi::ConstantInitialized)");
2384 format.Indent();
2385 const char* field_sep = ":";
2386 const auto put_sep = [&] {
2387 format("\n$1$ ", field_sep);
2388 field_sep = ",";
2389 };
2390
2391 if (!IsMapEntryMessage(descriptor_)) {
2392 // Process non-oneof fields first.
2393 for (auto field : optimized_order_) {
2394 auto& gen = field_generators_.get(field);
2395 put_sep();
2396 gen.GenerateConstinitInitializer(printer);
2397 }
2398
2399 if (IsAnyMessage(descriptor_, options_)) {
2400 put_sep();
2401 format("_any_metadata_(&type_url_, &value_)");
2402 }
2403
2404 if (descriptor_->real_oneof_decl_count() != 0) {
2405 put_sep();
2406 format("_oneof_case_{}");
2407 }
2408 }
2409
2410 format.Outdent();
2411 format("{}\n");
2412 }
2413
2414 void MessageGenerator::GenerateConstructorBody(io::Printer* printer,
2415 std::vector<bool> processed,
2416 bool copy_constructor) const {
2417 Formatter format(printer, variables_);
2418
2419 const RunMap runs = FindRuns(
2420 optimized_order_, [copy_constructor, this](const FieldDescriptor* field) {
2421 return (copy_constructor && IsPOD(field)) ||
2422 (!copy_constructor &&
2423 CanBeManipulatedAsRawBytes(field, options_, scc_analyzer_));
2424 });
2425
2426 std::string pod_template;
2427 if (copy_constructor) {
2428 pod_template =
2429 "::memcpy(&$first$, &from.$first$,\n"
2430 " static_cast<size_t>(reinterpret_cast<char*>(&$last$) -\n"
2431 " reinterpret_cast<char*>(&$first$)) + sizeof($last$));\n";
2432 } else {
2433 pod_template =
2434 "::memset(reinterpret_cast<char*>(this) + static_cast<size_t>(\n"
2435 " reinterpret_cast<char*>(&$first$) - "
2436 "reinterpret_cast<char*>(this)),\n"
2437 " 0, static_cast<size_t>(reinterpret_cast<char*>(&$last$) -\n"
2438 " reinterpret_cast<char*>(&$first$)) + sizeof($last$));\n";
2439 }
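// Illustrative generated code (hypothetical contiguous run of POD members
// id_, count_ and flags_), for the default-construction case:
//
//   ::memset(reinterpret_cast<char*>(this) + static_cast<size_t>(
//       reinterpret_cast<char*>(&id_) - reinterpret_cast<char*>(this)),
//       0, static_cast<size_t>(reinterpret_cast<char*>(&flags_) -
//       reinterpret_cast<char*>(&id_)) + sizeof(flags_));
//
// i.e. a single memset covers the whole run instead of one assignment per
// field; the copy-constructor variant uses the analogous memcpy from `from`.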
2440
2441 for (int i = 0; i < optimized_order_.size(); ++i) {
2442 if (processed[i]) {
2443 continue;
2444 }
2445
2446 const FieldDescriptor* field = optimized_order_[i];
2447 const auto it = runs.find(field);
2448
2449 // We only apply the memset technique to runs of more than one field, as
2450 // assignment is better than memset for generated code clarity.
2451 if (it != runs.end() && it->second > 1) {
2452 // Use a memset, then skip run_length fields.
2453 const size_t run_length = it->second;
2454 const std::string first_field_name = FieldMemberName(field);
2455 const std::string last_field_name =
2456 FieldMemberName(optimized_order_[i + run_length - 1]);
2457
2458 format.Set("first", first_field_name);
2459 format.Set("last", last_field_name);
2460
2461 format(pod_template.c_str());
2462
2463 i += run_length - 1;
2464 // ++i at the top of the loop.
2465 } else {
2466 if (copy_constructor) {
2467 field_generators_.get(field).GenerateCopyConstructorCode(printer);
2468 } else {
2469 field_generators_.get(field).GenerateConstructorCode(printer);
2470 }
2471 }
2472 }
2473 }
2474
2475 void MessageGenerator::GenerateStructors(io::Printer* printer) {
2476 Formatter format(printer, variables_);
2477
2478 std::string superclass;
2479 superclass = SuperClassName(descriptor_, options_);
2480 std::string initializer_with_arena = superclass + "(arena, is_message_owned)";
2481
2482 if (descriptor_->extension_range_count() > 0) {
2483 initializer_with_arena += ",\n _extensions_(arena)";
2484 }
2485
2486 // Initialize member variables with arena constructor.
2487 for (auto field : optimized_order_) {
2488 GOOGLE_DCHECK(!IsFieldStripped(field, options_));
2489 bool has_arena_constructor = field->is_repeated();
2490 if (!field->real_containing_oneof() &&
2491 (IsLazy(field, options_, scc_analyzer_) ||
2492 IsStringPiece(field, options_) ||
2493 (IsString(field, options_) && IsStringInlined(field, options_)))) {
2494 has_arena_constructor = true;
2495 }
2496 if (has_arena_constructor) {
2497 initializer_with_arena +=
2498 std::string(",\n ") + FieldName(field) + std::string("_(arena)");
2499 }
2500 }
2501
2502 if (IsAnyMessage(descriptor_, options_)) {
2503 initializer_with_arena += ",\n _any_metadata_(&type_url_, &value_)";
2504 }
2505 if (num_weak_fields_ > 0) {
2506 initializer_with_arena += ", _weak_field_map_(arena)";
2507 }
2508
2509 std::string initializer_null = superclass + "()";
2510 if (IsAnyMessage(descriptor_, options_)) {
2511 initializer_null += ", _any_metadata_(&type_url_, &value_)";
2512 }
2513 if (num_weak_fields_ > 0) {
2514 initializer_null += ", _weak_field_map_(nullptr)";
2515 }
2516
2517 format(
2518 "$classname$::$classname$(::$proto_ns$::Arena* arena,\n"
2519 " bool is_message_owned)\n"
2520 " : $1$ {\n",
2521 initializer_with_arena);
2522
2523 if (!inlined_string_indices_.empty()) {
2524 // Donate inline string fields.
2525 format.Indent();
2526 // The last bit is the tracking bit for registering ArenaDtor. A value of 1
2527 // means ArenaDtor was not registered during construction and must be
2528 // registered on demand.
2529 format("if (arena != nullptr) {\n");
2530 if (NeedsArenaDestructor() == ArenaDtorNeeds::kOnDemand) {
2531 format(
2532 " if (!is_message_owned) {\n"
2533 " $inlined_string_donated_array$[0] = ~0u;\n"
2534 " } else {\n"
2535 // We should not register ArenaDtor for MOA (message-owned arena) messages.
2536 " $inlined_string_donated_array$[0] = 0xFFFFFFFEu;\n"
2537 " }\n");
2538 } else {
2539 format(" $inlined_string_donated_array$[0] = 0xFFFFFFFEu;\n");
2540 }
2541 for (size_t i = 1; i < InlinedStringDonatedSize(); ++i) {
2542 format(" $inlined_string_donated_array$[$1$] = ~0u;\n", i);
2543 }
2544 format("}\n");
2545 format.Outdent();
2546 }
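// Illustrative reading of the bits initialized above (hypothetical inlined
// string field whose donation bit is bit 3 of word 0):
//
//   bool donated = (_inlined_string_donated_[0] >> 3) & 1u;       // 1 == donated
//   bool arena_dtor_pending = _inlined_string_donated_[0] & 0x1u; // tracking bit
//
// so ~0u means "all strings donated, on-demand ArenaDtor registration still
// pending", while 0xFFFFFFFEu means no on-demand registration should happen.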
2547
2548 if (!HasSimpleBaseClass(descriptor_, options_)) {
2549 format(" SharedCtor();\n");
2550 if (NeedsArenaDestructor() == ArenaDtorNeeds::kRequired) {
2551 format(
2552 " if (arena != nullptr && !is_message_owned) {\n"
2553 " arena->OwnCustomDestructor(this, &$classname$::ArenaDtor);\n"
2554 " }\n");
2555 }
2556 }
2557 format(
2558 " // @@protoc_insertion_point(arena_constructor:$full_name$)\n"
2559 "}\n");
2560
2561 std::map<std::string, std::string> vars;
2562 SetUnknownFieldsVariable(descriptor_, options_, &vars);
2563 format.AddMap(vars);
2564
2565 // Generate the copy constructor.
2566 if (UsingImplicitWeakFields(descriptor_->file(), options_)) {
2567 // If we are in lite mode and using implicit weak fields, we generate a
2568 // one-liner copy constructor that delegates to MergeFrom. This saves some
2569 // code size and also cuts down on the complexity of implicit weak fields.
2570 // We might eventually want to do this for all lite protos.
2571 format(
2572 "$classname$::$classname$(const $classname$& from)\n"
2573 " : $classname$() {\n"
2574 " MergeFrom(from);\n"
2575 "}\n");
2576 } else {
2577 format(
2578 "$classname$::$classname$(const $classname$& from)\n"
2579 " : $superclass$()");
2580 format.Indent();
2581 format.Indent();
2582 format.Indent();
2583
2584 // Do not copy inlined_string_donated_, because this is not an arena
2585 // constructor.
2586
2587 if (!has_bit_indices_.empty()) {
2588 format(",\n_has_bits_(from._has_bits_)");
2589 }
2590
2591 std::vector<bool> processed(optimized_order_.size(), false);
2592 for (int i = 0; i < optimized_order_.size(); i++) {
2593 auto field = optimized_order_[i];
2594 if (!(field->is_repeated() && !(field->is_map())) &&
2595 !IsCord(field, options_)) {
2596 continue;
2597 }
2598
2599 processed[i] = true;
2600 format(",\n$1$_(from.$1$_)", FieldName(field));
2601 }
2602
2603 if (IsAnyMessage(descriptor_, options_)) {
2604 format(",\n_any_metadata_(&type_url_, &value_)");
2605 }
2606 if (num_weak_fields_ > 0) {
2607 format(",\n_weak_field_map_(from._weak_field_map_)");
2608 }
2609
2610 format.Outdent();
2611 format.Outdent();
2612 format(" {\n");
2613
2614 format(
2615 "_internal_metadata_.MergeFrom<$unknown_fields_type$>(from._internal_"
2616 "metadata_);\n");
2617
2618 if (descriptor_->extension_range_count() > 0) {
2619 format(
2620 "$extensions$.MergeFrom(internal_default_instance(), "
2621 "from.$extensions$);\n");
2622 }
2623
2624 GenerateConstructorBody(printer, processed, true);
2625
2626 // Copy oneof fields. Copying a oneof field requires checking the oneof case first.
2627 for (auto oneof : OneOfRange(descriptor_)) {
2628 format(
2629 "clear_has_$1$();\n"
2630 "switch (from.$1$_case()) {\n",
2631 oneof->name());
2632 format.Indent();
2633 for (auto field : FieldRange(oneof)) {
2634 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
2635 format.Indent();
2636 if (!IsFieldStripped(field, options_)) {
2637 field_generators_.get(field).GenerateMergingCode(printer);
2638 }
2639 format("break;\n");
2640 format.Outdent();
2641 format("}\n");
2642 }
2643 format(
2644 "case $1$_NOT_SET: {\n"
2645 " break;\n"
2646 "}\n",
2647 ToUpper(oneof->name()));
2648 format.Outdent();
2649 format("}\n");
2650 }
2651
2652 format.Outdent();
2653 format(
2654 " // @@protoc_insertion_point(copy_constructor:$full_name$)\n"
2655 "}\n"
2656 "\n");
2657 }
2658
2659 // Generate the shared constructor code.
2660 GenerateSharedConstructorCode(printer);
2661
2662 // Generate the destructor.
2663 if (!HasSimpleBaseClass(descriptor_, options_)) {
2664 format(
2665 "$classname$::~$classname$() {\n"
2666 " // @@protoc_insertion_point(destructor:$full_name$)\n");
2667 format(
2668 " if (auto *arena = "
2669 "_internal_metadata_.DeleteReturnArena<$unknown_fields_type$>()) {\n"
2670 " (void)arena;\n");
2671 if (NeedsArenaDestructor() > ArenaDtorNeeds::kNone) {
2672 format(" ArenaDtor(this);\n");
2673 }
2674 format(
2675 " return;\n"
2676 " }\n");
2677 format(
2678 " SharedDtor();\n"
2679 "}\n"
2680 "\n");
2681 } else {
2682 // For messages using simple base classes, having no destructor
2683 // allows our vtable to share the same destructor as every other
2684 // message with a simple base class. This works only as long as
2685 // we have no fields needing destruction, of course. (No strings
2686 // or extensions)
2687 }
2688
2689 // Generate the shared destructor code.
2690 GenerateSharedDestructorCode(printer);
2691
2692 // Generate the arena-specific destructor code.
2693 if (NeedsArenaDestructor() > ArenaDtorNeeds::kNone) {
2694 GenerateArenaDestructorCode(printer);
2695 }
2696
2697 if (!HasSimpleBaseClass(descriptor_, options_)) {
2698 // Generate SetCachedSize.
2699 format(
2700 "void $classname$::SetCachedSize(int size) const {\n"
2701 " $cached_size$.Set(size);\n"
2702 "}\n");
2703 }
2704 }
2705
2706 void MessageGenerator::GenerateSourceInProto2Namespace(io::Printer* printer) {
2707 Formatter format(printer, variables_);
2708 format(
2709 "template<> "
2710 "PROTOBUF_NOINLINE $classtype$*\n"
2711 "Arena::CreateMaybeMessage< $classtype$ >(Arena* arena) {\n"
2712 " return Arena::CreateMessageInternal< $classtype$ >(arena);\n"
2713 "}\n");
2714 }
2715
2716 void MessageGenerator::GenerateClear(io::Printer* printer) {
2717 if (HasSimpleBaseClass(descriptor_, options_)) return;
2718 Formatter format(printer, variables_);
2719
2720 // The maximum number of bytes we will memset to zero without checking their
2721 // hasbit to see if a zero-init is necessary.
2722 const int kMaxUnconditionalPrimitiveBytesClear = 4;
2723
2724 format(
2725 "void $classname$::Clear() {\n"
2726 "// @@protoc_insertion_point(message_clear_start:$full_name$)\n");
2727 format.Indent();
2728
2729 format(
2730 // TODO(jwb): It would be better to avoid emitting this if it is not used,
2731 // rather than emitting a workaround for the resulting warning.
2732 "$uint32$ cached_has_bits = 0;\n"
2733 "// Prevent compiler warnings about cached_has_bits being unused\n"
2734 "(void) cached_has_bits;\n\n");
2735
2736 if (descriptor_->extension_range_count() > 0) {
2737 format("$extensions$.Clear();\n");
2738 }
2739
2740 // Collect fields into chunks. Each chunk may have an if() condition that
2741 // checks all hasbits in the chunk and skips it if none are set.
2742 int zero_init_bytes = 0;
2743 for (const auto& field : optimized_order_) {
2744 if (CanInitializeByZeroing(field)) {
2745 zero_init_bytes += EstimateAlignmentSize(field);
2746 }
2747 }
2748 bool merge_zero_init = zero_init_bytes > kMaxUnconditionalPrimitiveBytesClear;
2749 int chunk_count = 0;
2750
2751 std::vector<std::vector<const FieldDescriptor*>> chunks = CollectFields(
2752 optimized_order_,
2753 [&](const FieldDescriptor* a, const FieldDescriptor* b) -> bool {
2754 chunk_count++;
2755 // This predicate guarantees that there is only a single zero-init
2756 // (memset) per chunk, and if present it will be at the beginning.
2757 bool same = HasByteIndex(a) == HasByteIndex(b) &&
2758 a->is_repeated() == b->is_repeated() &&
2759 (CanInitializeByZeroing(a) == CanInitializeByZeroing(b) ||
2760 (CanInitializeByZeroing(a) &&
2761 (chunk_count == 1 || merge_zero_init)));
2762 if (!same) chunk_count = 0;
2763 return same;
2764 });
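// Illustrative chunking (hypothetical message): fields sharing the same
// has-byte and repeatedness, with compatible zero-initializability, end up in
// one chunk, e.g.
//
//   chunk 0: { int32 a_, int32 b_, bool c_ }   // one guarded memset
//   chunk 1: { string d_, Message* e_ }        // individually cleared
//
// so each chunk below can be skipped with a single cached_has_bits test.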
2765
2766 ColdChunkSkipper cold_skipper(options_, chunks, has_bit_indices_, kColdRatio);
2767 int cached_has_word_index = -1;
2768
2769 for (int chunk_index = 0; chunk_index < chunks.size(); chunk_index++) {
2770 std::vector<const FieldDescriptor*>& chunk = chunks[chunk_index];
2771 cold_skipper.OnStartChunk(chunk_index, cached_has_word_index, "", printer);
2772
2773 const FieldDescriptor* memset_start = nullptr;
2774 const FieldDescriptor* memset_end = nullptr;
2775 bool saw_non_zero_init = false;
2776
2777 for (const auto& field : chunk) {
2778 if (CanInitializeByZeroing(field)) {
2779 GOOGLE_CHECK(!saw_non_zero_init);
2780 if (!memset_start) memset_start = field;
2781 memset_end = field;
2782 } else {
2783 saw_non_zero_init = true;
2784 }
2785 }
2786
2787 // Whether we wrap this chunk in:
2788 // if (cached_has_bits & <chunk hasbits>) { /* chunk. */ }
2789 // We can omit the if() for chunk size 1, or if our fields do not have
2790 // hasbits. I don't understand the rationale for the last part of the
2791 // condition, but it matches the old logic.
2792 const bool have_outer_if = HasBitIndex(chunk.front()) != kNoHasbit &&
2793 chunk.size() > 1 &&
2794 (memset_end != chunk.back() || merge_zero_init);
2795
2796 if (have_outer_if) {
2797 // Emit an if() that will let us skip the whole chunk if none are set.
2798 uint32_t chunk_mask = GenChunkMask(chunk, has_bit_indices_);
2799 std::string chunk_mask_str =
2800 StrCat(strings::Hex(chunk_mask, strings::ZERO_PAD_8));
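// Worked example (illustrative): a chunk whose fields own has-bits 3, 4 and 5
// of the same word yields chunk_mask == 0x38, so the emitted guard is
//
//   if (cached_has_bits & 0x00000038u) { ... }
//
// and the whole chunk is skipped when none of its fields are present.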
2801
2802 // Check (up to) 8 has_bits at a time if we have more than one field in
2803 // this chunk. Due to field layout ordering, we may check
2804 // _has_bits_[last_chunk * 8 / 32] multiple times.
2805 GOOGLE_DCHECK_LE(2, popcnt(chunk_mask));
2806 GOOGLE_DCHECK_GE(8, popcnt(chunk_mask));
2807
2808 if (cached_has_word_index != HasWordIndex(chunk.front())) {
2809 cached_has_word_index = HasWordIndex(chunk.front());
2810 format("cached_has_bits = $has_bits$[$1$];\n", cached_has_word_index);
2811 }
2812 format("if (cached_has_bits & 0x$1$u) {\n", chunk_mask_str);
2813 format.Indent();
2814 }
2815
2816 if (memset_start) {
2817 if (memset_start == memset_end) {
2818 // For clarity, do not memset a single field.
2819 field_generators_.get(memset_start)
2820 .GenerateMessageClearingCode(printer);
2821 } else {
2822 format(
2823 "::memset(&$1$, 0, static_cast<size_t>(\n"
2824 " reinterpret_cast<char*>(&$2$) -\n"
2825 " reinterpret_cast<char*>(&$1$)) + sizeof($2$));\n",
2826 FieldMemberName(memset_start), FieldMemberName(memset_end));
2827 }
2828 }
2829
2830 // Clear all non-zero-initializable fields in the chunk.
2831 for (const auto& field : chunk) {
2832 if (CanInitializeByZeroing(field)) continue;
2833 // It's faster to just overwrite primitive types, but we should only
2834 // clear strings and messages if they were set.
2835 //
2836 // TODO(kenton): Let the CppFieldGenerator decide this somehow.
2837 bool have_enclosing_if =
2838 HasBitIndex(field) != kNoHasbit &&
2839 (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE ||
2840 field->cpp_type() == FieldDescriptor::CPPTYPE_STRING);
2841
2842 if (have_enclosing_if) {
2843 PrintPresenceCheck(format, field, has_bit_indices_, printer,
2844 &cached_has_word_index);
2845 }
2846
2847 field_generators_.get(field).GenerateMessageClearingCode(printer);
2848
2849 if (have_enclosing_if) {
2850 format.Outdent();
2851 format("}\n");
2852 }
2853 }
2854
2855 if (have_outer_if) {
2856 format.Outdent();
2857 format("}\n");
2858 }
2859
2860 if (cold_skipper.OnEndChunk(chunk_index, printer)) {
2861 // Reset here, as it may have been updated in the just-closed if statement.
2862 cached_has_word_index = -1;
2863 }
2864 }
2865
2866 // Step 4: Unions.
2867 for (auto oneof : OneOfRange(descriptor_)) {
2868 format("clear_$1$();\n", oneof->name());
2869 }
2870
2871 if (num_weak_fields_) {
2872 format("$weak_field_map$.ClearAll();\n");
2873 }
2874
2875 // We don't clear donated status.
2876
2877 if (!has_bit_indices_.empty()) {
2878 // Step 5: Everything else.
2879 format("$has_bits$.Clear();\n");
2880 }
2881
2882 std::map<std::string, std::string> vars;
2883 SetUnknownFieldsVariable(descriptor_, options_, &vars);
2884 format.AddMap(vars);
2885 format("_internal_metadata_.Clear<$unknown_fields_type$>();\n");
2886
2887 format.Outdent();
2888 format("}\n");
2889 }
2890
2891 void MessageGenerator::GenerateOneofClear(io::Printer* printer) {
2892 // Generated function clears the active field and union case (e.g. foo_case_).
2893 int i = 0;
2894 for (auto oneof : OneOfRange(descriptor_)) {
2895 Formatter format(printer, variables_);
2896 format.Set("oneofname", oneof->name());
2897
2898 format(
2899 "void $classname$::clear_$oneofname$() {\n"
2900 "// @@protoc_insertion_point(one_of_clear_start:$full_name$)\n");
2901 format.Indent();
2902 format("switch ($oneofname$_case()) {\n");
2903 format.Indent();
2904 for (auto field : FieldRange(oneof)) {
2905 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
2906 format.Indent();
2907 // We clear only allocated objects in oneofs
2908 if (!IsStringOrMessage(field) || IsFieldStripped(field, options_)) {
2909 format("// No need to clear\n");
2910 } else {
2911 field_generators_.get(field).GenerateClearingCode(printer);
2912 }
2913 format("break;\n");
2914 format.Outdent();
2915 format("}\n");
2916 }
2917 format(
2918 "case $1$_NOT_SET: {\n"
2919 " break;\n"
2920 "}\n",
2921 ToUpper(oneof->name()));
2922 format.Outdent();
2923 format(
2924 "}\n"
2925 "$oneof_case$[$1$] = $2$_NOT_SET;\n",
2926 i, ToUpper(oneof->name()));
2927 format.Outdent();
2928 format(
2929 "}\n"
2930 "\n");
2931 i++;
2932 }
2933 }
2934
2935 void MessageGenerator::GenerateSwap(io::Printer* printer) {
2936 if (HasSimpleBaseClass(descriptor_, options_)) return;
2937 Formatter format(printer, variables_);
2938
2939 format("void $classname$::InternalSwap($classname$* other) {\n");
2940 format.Indent();
2941 format("using std::swap;\n");
2942
2943 if (HasGeneratedMethods(descriptor_->file(), options_)) {
2944 if (descriptor_->extension_range_count() > 0) {
2945 format(
2946 "$extensions$.InternalSwap(&other->$extensions$);"
2947 "\n");
2948 }
2949
2950 std::map<std::string, std::string> vars;
2951 SetUnknownFieldsVariable(descriptor_, options_, &vars);
2952 format.AddMap(vars);
2953 if (HasSingularString(descriptor_, options_)) {
2954 format(
2955 "auto* lhs_arena = GetArenaForAllocation();\n"
2956 "auto* rhs_arena = other->GetArenaForAllocation();\n");
2957 }
2958 format("_internal_metadata_.InternalSwap(&other->_internal_metadata_);\n");
2959
2960 if (!has_bit_indices_.empty()) {
2961 for (int i = 0; i < HasBitsSize(); ++i) {
2962 format("swap($has_bits$[$1$], other->$has_bits$[$1$]);\n", i);
2963 }
2964 }
2965
2966 // If possible, we swap several fields at once, including padding.
2967 const RunMap runs =
2968 FindRuns(optimized_order_, [this](const FieldDescriptor* field) {
2969 return CanBeManipulatedAsRawBytes(field, options_, scc_analyzer_);
2970 });
2971
2972 for (int i = 0; i < optimized_order_.size(); ++i) {
2973 const FieldDescriptor* field = optimized_order_[i];
2974 const auto it = runs.find(field);
2975
2976 // We only apply the memswap technique to runs of more than one field, as
2977 // `swap(field_, other.field_)` is better than
2978 // `memswap<...>(&field_, &other.field_)` for generated code readability.
2979 if (it != runs.end() && it->second > 1) {
2980 // Use a memswap, then skip run_length fields.
2981 const size_t run_length = it->second;
2982 const std::string first_field_name = FieldMemberName(field);
2983 const std::string last_field_name =
2984 FieldMemberName(optimized_order_[i + run_length - 1]);
2985
2986 format.Set("first", first_field_name);
2987 format.Set("last", last_field_name);
2988
2989 format(
2990 "::PROTOBUF_NAMESPACE_ID::internal::memswap<\n"
2991 " PROTOBUF_FIELD_OFFSET($classname$, $last$)\n"
2992 " + sizeof($classname$::$last$)\n"
2993 " - PROTOBUF_FIELD_OFFSET($classname$, $first$)>(\n"
2994 " reinterpret_cast<char*>(&$first$),\n"
2995 " reinterpret_cast<char*>(&other->$first$));\n");
2996
2997 i += run_length - 1;
2998 // ++i at the top of the loop.
2999 } else {
3000 field_generators_.get(field).GenerateSwappingCode(printer);
3001 }
3002 }
3003
3004 for (auto oneof : OneOfRange(descriptor_)) {
3005 format("swap($1$_, other->$1$_);\n", oneof->name());
3006 }
3007
3008 for (int i = 0; i < descriptor_->real_oneof_decl_count(); i++) {
3009 format(
3010 "swap($oneof_case$[$1$], "
3011 "other->$oneof_case$[$1$]);\n",
3012 i);
3013 }
3014
3015 if (num_weak_fields_) {
3016 format(
3017 "$weak_field_map$.UnsafeArenaSwap(&other->$weak_field_map$)"
3018 ";\n");
3019 }
3020
3021 if (!inlined_string_indices_.empty()) {
3022 for (size_t i = 0; i < InlinedStringDonatedSize(); ++i) {
3023 format(
3024 "swap($inlined_string_donated_array$[$1$], "
3025 "other->$inlined_string_donated_array$[$1$]);\n",
3026 i);
3027 }
3028 }
3029 } else {
3030 format("GetReflection()->Swap(this, other);");
3031 }
3032
3033 format.Outdent();
3034 format("}\n");
3035 }
3036
3037 void MessageGenerator::GenerateMergeFrom(io::Printer* printer) {
3038 Formatter format(printer, variables_);
3039 if (!HasSimpleBaseClass(descriptor_, options_)) {
3040 if (HasDescriptorMethods(descriptor_->file(), options_)) {
3041 // We don't override the generalized MergeFrom (aka that which
3042 // takes in the Message base class as a parameter); instead we just
3043 // let the base Message::MergeFrom take care of it. The base MergeFrom
3044 // knows how to quickly confirm the types exactly match, and if so, will
3045 // use GetClassData() to retrieve the address of MergeImpl, which calls
3046 // the fast MergeFrom overload. Most callers avoid all this by passing
3047 // a "from" message that is the same type as the message being merged
3048 // into, rather than a generic Message.
3049
3050 format(
3051 "const ::$proto_ns$::Message::ClassData "
3052 "$classname$::_class_data_ = {\n"
3053 " ::$proto_ns$::Message::CopyWithSizeCheck,\n"
3054 " $classname$::MergeImpl\n"
3055 "};\n"
3056 "const ::$proto_ns$::Message::ClassData*"
3057 "$classname$::GetClassData() const { return &_class_data_; }\n"
3058 "\n"
3059 "void $classname$::MergeImpl(::$proto_ns$::Message* to,\n"
3060 " const ::$proto_ns$::Message& from) {\n"
3061 " static_cast<$classname$ *>(to)->MergeFrom(\n"
3062 " static_cast<const $classname$ &>(from));\n"
3063 "}\n"
3064 "\n");
3065 } else {
3066 // Generate CheckTypeAndMergeFrom().
3067 format(
3068 "void $classname$::CheckTypeAndMergeFrom(\n"
3069 " const ::$proto_ns$::MessageLite& from) {\n"
3070 " MergeFrom(*::_pbi::DownCast<const $classname$*>(\n"
3071 " &from));\n"
3072 "}\n");
3073 }
3074 } else {
3075 // In the simple case, we just define ClassData that vectors back to the
3076 // simple implementation of Copy and Merge.
3077 format(
3078 "const ::$proto_ns$::Message::ClassData "
3079 "$classname$::_class_data_ = {\n"
3080 " $superclass$::CopyImpl,\n"
3081 " $superclass$::MergeImpl,\n"
3082 "};\n"
3083 "const ::$proto_ns$::Message::ClassData*"
3084 "$classname$::GetClassData() const { return &_class_data_; }\n"
3085 "\n"
3086 "\n");
3087 }
3088 }
3089
void MessageGenerator::GenerateClassSpecificMergeFrom(io::Printer* printer) {
3091 if (HasSimpleBaseClass(descriptor_, options_)) return;
3092 // Generate the class-specific MergeFrom, which avoids the GOOGLE_CHECK and cast.
3093 Formatter format(printer, variables_);
3094 format(
3095 "void $classname$::MergeFrom(const $classname$& from) {\n"
3096 "$annotate_mergefrom$"
3097 "// @@protoc_insertion_point(class_specific_merge_from_start:"
3098 "$full_name$)\n"
3099 " $DCHK$_NE(&from, this);\n");
3100 format.Indent();
3101
3102 format(
3103 "$uint32$ cached_has_bits = 0;\n"
3104 "(void) cached_has_bits;\n\n");
3105
3106 std::vector<std::vector<const FieldDescriptor*>> chunks = CollectFields(
3107 optimized_order_,
3108 [&](const FieldDescriptor* a, const FieldDescriptor* b) -> bool {
3109 return HasByteIndex(a) == HasByteIndex(b);
3110 });
3111
3112 ColdChunkSkipper cold_skipper(options_, chunks, has_bit_indices_, kColdRatio);
3113
3114 // cached_has_word_index maintains that:
3115 // cached_has_bits = from._has_bits_[cached_has_word_index]
3116 // for cached_has_word_index >= 0
3117 int cached_has_word_index = -1;
3118
3119 for (int chunk_index = 0; chunk_index < chunks.size(); chunk_index++) {
3120 const std::vector<const FieldDescriptor*>& chunk = chunks[chunk_index];
3121 bool have_outer_if =
3122 chunk.size() > 1 && HasByteIndex(chunk.front()) != kNoHasbit;
3123 cold_skipper.OnStartChunk(chunk_index, cached_has_word_index, "from.",
3124 printer);
3125
3126 if (have_outer_if) {
3127 // Emit an if() that will let us skip the whole chunk if none are set.
3128 uint32_t chunk_mask = GenChunkMask(chunk, has_bit_indices_);
3129 std::string chunk_mask_str =
3130 StrCat(strings::Hex(chunk_mask, strings::ZERO_PAD_8));
3131
3132 // Check (up to) 8 has_bits at a time if we have more than one field in
3133 // this chunk. Due to field layout ordering, we may check
3134 // _has_bits_[last_chunk * 8 / 32] multiple times.
3135 GOOGLE_DCHECK_LE(2, popcnt(chunk_mask));
3136 GOOGLE_DCHECK_GE(8, popcnt(chunk_mask));
3137
3138 if (cached_has_word_index != HasWordIndex(chunk.front())) {
3139 cached_has_word_index = HasWordIndex(chunk.front());
3140 format("cached_has_bits = from.$has_bits$[$1$];\n",
3141 cached_has_word_index);
3142 }
3143
3144 format("if (cached_has_bits & 0x$1$u) {\n", chunk_mask_str);
3145 format.Indent();
3146 }
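    // Illustrative sketch (hypothetical values): for the first chunk of an
    // eight-field message this typically emits something like
    //
    //   cached_has_bits = from._has_bits_[0];
    //   if (cached_has_bits & 0x000000ffu) {
    //     ... per-field merging code ...
    //   }
    //
    // so a fully-unset chunk is skipped with a single test.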
3147
3148 // Go back and emit merging code for each of the fields we processed.
3149 bool deferred_has_bit_changes = false;
3150 for (const auto field : chunk) {
3151 const FieldGenerator& generator = field_generators_.get(field);
3152
3153 if (field->is_repeated()) {
3154 generator.GenerateMergingCode(printer);
3155 } else if (field->is_optional() && !HasHasbit(field)) {
3156 // Merge semantics without true field presence: primitive fields are
3157 // merged only if non-zero (numeric) or non-empty (string).
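        // For example (a sketch; `foo` is a hypothetical int32 field), the
        // emitted condition and merge look roughly like:
        //
        //   if (from._internal_foo() != 0) {
        //     _internal_set_foo(from._internal_foo());
        //   }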
3158 bool have_enclosing_if =
3159 EmitFieldNonDefaultCondition(printer, "from.", field);
3160 generator.GenerateMergingCode(printer);
3161 if (have_enclosing_if) {
3162 format.Outdent();
3163 format("}\n");
3164 }
3165 } else if (field->options().weak() ||
3166 cached_has_word_index != HasWordIndex(field)) {
3167 // Check hasbit, not using cached bits.
3168 GOOGLE_CHECK(HasHasbit(field));
3169 format("if (from._internal_has_$1$()) {\n", FieldName(field));
3170 format.Indent();
3171 generator.GenerateMergingCode(printer);
3172 format.Outdent();
3173 format("}\n");
3174 } else {
3175 // Check hasbit, using cached bits.
3176 GOOGLE_CHECK(HasHasbit(field));
3177 int has_bit_index = has_bit_indices_[field->index()];
3178 const std::string mask = StrCat(
3179 strings::Hex(1u << (has_bit_index % 32), strings::ZERO_PAD_8));
3180 format("if (cached_has_bits & 0x$1$u) {\n", mask);
3181 format.Indent();
3182
3183 if (have_outer_if && IsPOD(field)) {
3184 // Defer hasbit modification until the end of chunk.
3185 // This can reduce the number of loads/stores by up to 7 per 8 fields.
3186 deferred_has_bit_changes = true;
3187 generator.GenerateCopyConstructorCode(printer);
3188 } else {
3189 generator.GenerateMergingCode(printer);
3190 }
3191
3192 format.Outdent();
3193 format("}\n");
3194 }
3195 }
3196
3197 if (have_outer_if) {
3198 if (deferred_has_bit_changes) {
3199 // Flush the has bits for the primitives we deferred.
3200 GOOGLE_CHECK_LE(0, cached_has_word_index);
3201 format("$has_bits$[$1$] |= cached_has_bits;\n", cached_has_word_index);
3202 }
3203
3204 format.Outdent();
3205 format("}\n");
3206 }
3207
3208 if (cold_skipper.OnEndChunk(chunk_index, printer)) {
      // Reset here as it may have been updated in the just-closed if statement.
3210 cached_has_word_index = -1;
3211 }
3212 }
3213
  // Merge oneof fields. Oneof fields require an explicit oneof-case check.
3215 for (auto oneof : OneOfRange(descriptor_)) {
3216 format("switch (from.$1$_case()) {\n", oneof->name());
3217 format.Indent();
3218 for (auto field : FieldRange(oneof)) {
3219 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
3220 format.Indent();
3221 if (!IsFieldStripped(field, options_)) {
3222 field_generators_.get(field).GenerateMergingCode(printer);
3223 }
3224 format("break;\n");
3225 format.Outdent();
3226 format("}\n");
3227 }
3228 format(
3229 "case $1$_NOT_SET: {\n"
3230 " break;\n"
3231 "}\n",
3232 ToUpper(oneof->name()));
3233 format.Outdent();
3234 format("}\n");
3235 }
3236 if (num_weak_fields_) {
3237 format(
3238 "$weak_field_map$.MergeFrom(from.$weak_field_map$);"
3239 "\n");
3240 }
3241
3242 // Merging of extensions and unknown fields is done last, to maximize
3243 // the opportunity for tail calls.
3244 if (descriptor_->extension_range_count() > 0) {
3245 format(
3246 "$extensions$.MergeFrom(internal_default_instance(), "
3247 "from.$extensions$);\n");
3248 }
3249
3250 format(
3251 "_internal_metadata_.MergeFrom<$unknown_fields_type$>(from._internal_"
3252 "metadata_);\n");
3253
3254 format.Outdent();
3255 format("}\n");
3256 }
3257
void MessageGenerator::GenerateCopyFrom(io::Printer* printer) {
3259 if (HasSimpleBaseClass(descriptor_, options_)) return;
3260 Formatter format(printer, variables_);
3261 if (HasDescriptorMethods(descriptor_->file(), options_)) {
    // We don't override the generalized CopyFrom (aka that which
    // takes in the Message base class as a parameter); instead we just
    // let the base Message::CopyFrom take care of it. The base CopyFrom
    // knows how to quickly confirm the types exactly match, and if so, will
    // use GetClassData() to get the address of Message::CopyWithSizeCheck,
    // which calls Clear() and then MergeFrom(), as well as making sure,
    // in debug builds, that clearing the destination message doesn't alter
    // the size of the source.
    // Most callers avoid this by passing a "from" message that is the same
    // type as the message being merged into, rather than a generic Message.
3272 }
3273
3274 // Generate the class-specific CopyFrom.
3275 format(
3276 "void $classname$::CopyFrom(const $classname$& from) {\n"
3277 "// @@protoc_insertion_point(class_specific_copy_from_start:"
3278 "$full_name$)\n");
3279 format.Indent();
3280
3281 format("if (&from == this) return;\n");
3282
3283 if (!options_.opensource_runtime) {
3284 // This check is disabled in the opensource release because we're
3285 // concerned that many users do not define NDEBUG in their release builds.
3286 format(
3287 "#ifndef NDEBUG\n"
3288 "size_t from_size = from.ByteSizeLong();\n"
3289 "#endif\n"
3290 "Clear();\n"
3291 "#ifndef NDEBUG\n"
3292 "$CHK$_EQ(from_size, from.ByteSizeLong())\n"
3293 " << \"Source of CopyFrom changed when clearing target. Either \"\n"
3294 " \"source is a nested message in target (not allowed), or \"\n"
3295 " \"another thread is modifying the source.\";\n"
3296 "#endif\n");
3297 } else {
3298 format("Clear();\n");
3299 }
3300 format("MergeFrom(from);\n");
3301
3302 format.Outdent();
3303 format("}\n");
3304 }
3305
void MessageGenerator::GenerateVerify(io::Printer* printer) {
3307 }
3308
void MessageGenerator::GenerateSerializeOneofFields(
    io::Printer* printer, const std::vector<const FieldDescriptor*>& fields) {
3311 Formatter format(printer, variables_);
3312 GOOGLE_CHECK(!fields.empty());
3313 if (fields.size() == 1) {
3314 GenerateSerializeOneField(printer, fields[0], -1);
3315 return;
3316 }
3317 // We have multiple mutually exclusive choices. Emit a switch statement.
3318 const OneofDescriptor* oneof = fields[0]->containing_oneof();
3319 format("switch ($1$_case()) {\n", oneof->name());
3320 format.Indent();
3321 for (auto field : fields) {
3322 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
3323 format.Indent();
3324 field_generators_.get(field).GenerateSerializeWithCachedSizesToArray(
3325 printer);
3326 format("break;\n");
3327 format.Outdent();
3328 format("}\n");
3329 }
3330 format.Outdent();
3331 // Doing nothing is an option.
3332 format(
3333 " default: ;\n"
3334 "}\n");
3335 }
3336
void MessageGenerator::GenerateSerializeOneField(io::Printer* printer,
                                                 const FieldDescriptor* field,
                                                 int cached_has_bits_index) {
3340 Formatter format(printer, variables_);
3341 if (!field->options().weak()) {
    // For weak fields, PrintFieldComment is called during iteration.
3343 PrintFieldComment(format, field);
3344 }
3345
3346 bool have_enclosing_if = false;
3347 if (field->options().weak()) {
3348 } else if (HasHasbit(field)) {
3349 // Attempt to use the state of cached_has_bits, if possible.
3350 int has_bit_index = HasBitIndex(field);
3351 if (cached_has_bits_index == has_bit_index / 32) {
3352 const std::string mask =
3353 StrCat(strings::Hex(1u << (has_bit_index % 32), strings::ZERO_PAD_8));
3354
3355 format("if (cached_has_bits & 0x$1$u) {\n", mask);
3356 } else {
3357 format("if (_internal_has_$1$()) {\n", FieldName(field));
3358 }
3359
3360 format.Indent();
3361 have_enclosing_if = true;
3362 } else if (field->is_optional() && !HasHasbit(field)) {
3363 have_enclosing_if = EmitFieldNonDefaultCondition(printer, "this->", field);
3364 }
3365
3366 field_generators_.get(field).GenerateSerializeWithCachedSizesToArray(printer);
3367
3368 if (have_enclosing_if) {
3369 format.Outdent();
3370 format("}\n");
3371 }
3372 format("\n");
3373 }
3374
void MessageGenerator::GenerateSerializeOneExtensionRange(
    io::Printer* printer, const Descriptor::ExtensionRange* range) {
3377 std::map<std::string, std::string> vars = variables_;
3378 vars["start"] = StrCat(range->start);
3379 vars["end"] = StrCat(range->end);
3380 Formatter format(printer, vars);
3381 format("// Extension range [$start$, $end$)\n");
3382 format(
3383 "target = $extensions$._InternalSerialize(\n"
3384 "internal_default_instance(), $start$, $end$, target, stream);\n\n");
3385 }
3386
void MessageGenerator::GenerateSerializeWithCachedSizesToArray(
    io::Printer* printer) {
3389 if (HasSimpleBaseClass(descriptor_, options_)) return;
3390 Formatter format(printer, variables_);
3391 if (descriptor_->options().message_set_wire_format()) {
3392 // Special-case MessageSet.
3393 format(
3394 "$uint8$* $classname$::_InternalSerialize(\n"
3395 " $uint8$* target, ::$proto_ns$::io::EpsCopyOutputStream* stream) "
3396 "const {\n"
3397 "$annotate_serialize$"
3398 " target = $extensions$."
3399 "InternalSerializeMessageSetWithCachedSizesToArray(\n" //
3400 "internal_default_instance(), target, stream);\n");
3401 std::map<std::string, std::string> vars;
3402 SetUnknownFieldsVariable(descriptor_, options_, &vars);
3403 format.AddMap(vars);
3404 format(
3405 " target = ::_pbi::"
3406 "InternalSerializeUnknownMessageSetItemsToArray(\n"
3407 " $unknown_fields$, target, stream);\n");
3408 format(
3409 " return target;\n"
3410 "}\n");
3411 return;
3412 }
3413
3414 format(
3415 "$uint8$* $classname$::_InternalSerialize(\n"
3416 " $uint8$* target, ::$proto_ns$::io::EpsCopyOutputStream* stream) "
3417 "const {\n"
3418 "$annotate_serialize$");
3419 format.Indent();
3420
3421 format("// @@protoc_insertion_point(serialize_to_array_start:$full_name$)\n");
3422
3423 if (!ShouldSerializeInOrder(descriptor_, options_)) {
3424 format.Outdent();
3425 format("#ifdef NDEBUG\n");
3426 format.Indent();
3427 }
3428
3429 GenerateSerializeWithCachedSizesBody(printer);
3430
3431 if (!ShouldSerializeInOrder(descriptor_, options_)) {
3432 format.Outdent();
3433 format("#else // NDEBUG\n");
3434 format.Indent();
3435
3436 GenerateSerializeWithCachedSizesBodyShuffled(printer);
3437
3438 format.Outdent();
3439 format("#endif // !NDEBUG\n");
3440 format.Indent();
3441 }
3442
3443 format("// @@protoc_insertion_point(serialize_to_array_end:$full_name$)\n");
3444
3445 format.Outdent();
3446 format(
3447 " return target;\n"
3448 "}\n");
3449 }
3450
void MessageGenerator::GenerateSerializeWithCachedSizesBody(
    io::Printer* printer) {
3453 if (HasSimpleBaseClass(descriptor_, options_)) return;
3454 Formatter format(printer, variables_);
  // If there are multiple fields in a row from the same oneof then we
  // coalesce them and emit a switch statement. This is more efficient
  // because it lets the C++ compiler know this is an "at most one can happen"
  // situation. If we emitted "if (has_x()) ...; if (has_y()) ..." the C++
  // compiler's emitted code might check has_y() even when has_x() is true.
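  //
  // For instance (a sketch; `kind`, `foo`, and `bar` are hypothetical names),
  // two adjacent fields of the same oneof end up serialized as:
  //
  //   switch (kind_case()) {
  //     case kFoo: {
  //       ... serialize foo ...
  //       break;
  //     }
  //     case kBar: {
  //       ... serialize bar ...
  //       break;
  //     }
  //     default: ;
  //   }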
3460 class LazySerializerEmitter {
3461 public:
3462 LazySerializerEmitter(MessageGenerator* mg, io::Printer* printer)
3463 : mg_(mg),
3464 format_(printer),
3465 eager_(IsProto3(mg->descriptor_->file())),
3466 cached_has_bit_index_(kNoHasbit) {}
3467
3468 ~LazySerializerEmitter() { Flush(); }
3469
3470 // If conditions allow, try to accumulate a run of fields from the same
3471 // oneof, and handle them at the next Flush().
3472 void Emit(const FieldDescriptor* field) {
3473 if (eager_ || MustFlush(field)) {
3474 Flush();
3475 }
3476 if (!field->real_containing_oneof()) {
3477 // TODO(ckennelly): Defer non-oneof fields similarly to oneof fields.
3478
3479 if (!field->options().weak() && !field->is_repeated() && !eager_) {
3480 // We speculatively load the entire _has_bits_[index] contents, even
3481 // if it is for only one field. Deferring non-oneof emitting would
3482 // allow us to determine whether this is going to be useful.
3483 int has_bit_index = mg_->has_bit_indices_[field->index()];
3484 if (cached_has_bit_index_ != has_bit_index / 32) {
3485 // Reload.
3486 int new_index = has_bit_index / 32;
3487
3488 format_("cached_has_bits = _has_bits_[$1$];\n", new_index);
3489
3490 cached_has_bit_index_ = new_index;
3491 }
3492 }
3493
3494 mg_->GenerateSerializeOneField(format_.printer(), field,
3495 cached_has_bit_index_);
3496 } else {
3497 v_.push_back(field);
3498 }
3499 }
3500
3501 void EmitIfNotNull(const FieldDescriptor* field) {
3502 if (field != nullptr) {
3503 Emit(field);
3504 }
3505 }
3506
3507 void Flush() {
3508 if (!v_.empty()) {
3509 mg_->GenerateSerializeOneofFields(format_.printer(), v_);
3510 v_.clear();
3511 }
3512 }
3513
3514 private:
3515 // If we have multiple fields in v_ then they all must be from the same
3516 // oneof. Would adding field to v_ break that invariant?
3517 bool MustFlush(const FieldDescriptor* field) {
3518 return !v_.empty() &&
3519 v_[0]->containing_oneof() != field->containing_oneof();
3520 }
3521
3522 MessageGenerator* mg_;
3523 Formatter format_;
3524 const bool eager_;
3525 std::vector<const FieldDescriptor*> v_;
3526
3527 // cached_has_bit_index_ maintains that:
3528 // cached_has_bits = from._has_bits_[cached_has_bit_index_]
3529 // for cached_has_bit_index_ >= 0
3530 int cached_has_bit_index_;
3531 };
3532
3533 class LazyExtensionRangeEmitter {
3534 public:
3535 LazyExtensionRangeEmitter(MessageGenerator* mg, io::Printer* printer)
3536 : mg_(mg), format_(printer) {}
3537
3538 void AddToRange(const Descriptor::ExtensionRange* range) {
3539 if (!has_current_range_) {
3540 current_combined_range_ = *range;
3541 has_current_range_ = true;
3542 } else {
3543 current_combined_range_.start =
3544 std::min(current_combined_range_.start, range->start);
3545 current_combined_range_.end =
3546 std::max(current_combined_range_.end, range->end);
3547 }
3548 }
3549
3550 void Flush() {
3551 if (has_current_range_) {
3552 mg_->GenerateSerializeOneExtensionRange(format_.printer(),
                                              &current_combined_range_);
3554 }
3555 has_current_range_ = false;
3556 }
3557
3558 private:
3559 MessageGenerator* mg_;
3560 Formatter format_;
3561 bool has_current_range_ = false;
3562 Descriptor::ExtensionRange current_combined_range_;
3563 };
3564
3565 // We need to track the largest weak field, because weak fields are serialized
3566 // differently than normal fields. The WeakFieldMap::FieldWriter will
3567 // serialize all weak fields that are ordinally between the last serialized
3568 // weak field and the current field. In order to guarantee that all weak
3569 // fields are serialized, we need to make sure to emit the code to serialize
3570 // the largest weak field present at some point.
3571 class LargestWeakFieldHolder {
3572 public:
3573 const FieldDescriptor* Release() {
3574 const FieldDescriptor* result = field_;
3575 field_ = nullptr;
3576 return result;
3577 }
3578 void ReplaceIfLarger(const FieldDescriptor* field) {
3579 if (field_ == nullptr || field_->number() < field->number()) {
3580 field_ = field;
3581 }
3582 }
3583
3584 private:
3585 const FieldDescriptor* field_ = nullptr;
3586 };
3587
3588 std::vector<const FieldDescriptor*> ordered_fields =
3589 SortFieldsByNumber(descriptor_);
3590
3591 std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
3592 sorted_extensions.reserve(descriptor_->extension_range_count());
3593 for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
3594 sorted_extensions.push_back(descriptor_->extension_range(i));
3595 }
3596 std::sort(sorted_extensions.begin(), sorted_extensions.end(),
3597 ExtensionRangeSorter());
3598 if (num_weak_fields_) {
3599 format(
3600 "::_pbi::WeakFieldMap::FieldWriter field_writer("
3601 "$weak_field_map$);\n");
3602 }
3603
3604 format(
3605 "$uint32$ cached_has_bits = 0;\n"
3606 "(void) cached_has_bits;\n\n");
3607
3608 // Merge the fields and the extension ranges, both sorted by field number.
3609 {
3610 LazySerializerEmitter e(this, printer);
3611 LazyExtensionRangeEmitter re(this, printer);
3612 LargestWeakFieldHolder largest_weak_field;
3613 int i, j;
3614 for (i = 0, j = 0;
3615 i < ordered_fields.size() || j < sorted_extensions.size();) {
3616 if ((j == sorted_extensions.size()) ||
3617 (i < descriptor_->field_count() &&
3618 ordered_fields[i]->number() < sorted_extensions[j]->start)) {
3619 const FieldDescriptor* field = ordered_fields[i++];
3620 if (IsFieldStripped(field, options_)) {
3621 continue;
3622 }
3623 re.Flush();
3624 if (field->options().weak()) {
3625 largest_weak_field.ReplaceIfLarger(field);
3626 PrintFieldComment(format, field);
3627 } else {
3628 e.EmitIfNotNull(largest_weak_field.Release());
3629 e.Emit(field);
3630 }
3631 } else {
3632 e.EmitIfNotNull(largest_weak_field.Release());
3633 e.Flush();
3634 re.AddToRange(sorted_extensions[j++]);
3635 }
3636 }
3637 re.Flush();
3638 e.EmitIfNotNull(largest_weak_field.Release());
3639 }
3640
3641 std::map<std::string, std::string> vars;
3642 SetUnknownFieldsVariable(descriptor_, options_, &vars);
3643 format.AddMap(vars);
3644 format("if (PROTOBUF_PREDICT_FALSE($have_unknown_fields$)) {\n");
3645 format.Indent();
3646 if (UseUnknownFieldSet(descriptor_->file(), options_)) {
3647 format(
3648 "target = "
3649 "::_pbi::WireFormat::"
3650 "InternalSerializeUnknownFieldsToArray(\n"
3651 " $unknown_fields$, target, stream);\n");
3652 } else {
3653 format(
3654 "target = stream->WriteRaw($unknown_fields$.data(),\n"
3655 " static_cast<int>($unknown_fields$.size()), target);\n");
3656 }
3657 format.Outdent();
3658 format("}\n");
3659 }
3660
void MessageGenerator::GenerateSerializeWithCachedSizesBodyShuffled(
    io::Printer* printer) {
3663 Formatter format(printer, variables_);
3664
3665 std::vector<const FieldDescriptor*> ordered_fields =
3666 SortFieldsByNumber(descriptor_);
3667 ordered_fields.erase(
3668 std::remove_if(ordered_fields.begin(), ordered_fields.end(),
3669 [this](const FieldDescriptor* f) {
3670 return !IsFieldUsed(f, options_);
3671 }),
3672 ordered_fields.end());
3673
3674 std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
3675 sorted_extensions.reserve(descriptor_->extension_range_count());
3676 for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
3677 sorted_extensions.push_back(descriptor_->extension_range(i));
3678 }
3679 std::sort(sorted_extensions.begin(), sorted_extensions.end(),
3680 ExtensionRangeSorter());
3681
3682 int num_fields = ordered_fields.size() + sorted_extensions.size();
3683 constexpr int kLargePrime = 1000003;
3684 GOOGLE_CHECK_LT(num_fields, kLargePrime)
3685 << "Prime offset must be greater than the number of fields to ensure "
3686 "those are coprime.";
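  // The emitted body (a sketch of its shape only; N stands for the combined
  // count of fields and extension ranges) is a descending-index loop that
  // dispatches through a switch:
  //
  //   for (int i = N - 1; i >= 0; i-- ) {
  //     switch (i) {
  //       case 0: { ... serialize field or extension range #0 ...; break; }
  //       ...
  //       default: { /* a debug check on the unexpected index */ }
  //     }
  //   }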
3687
3688 if (num_weak_fields_) {
3689 format(
3690 "::_pbi::WeakFieldMap::FieldWriter field_writer("
3691 "$weak_field_map$);\n");
3692 }
3693
3694 format("for (int i = $1$; i >= 0; i-- ) {\n", num_fields - 1);
3695
3696 format.Indent();
3697 format("switch(i) {\n");
3698 format.Indent();
3699
3700 int index = 0;
3701 for (const auto* f : ordered_fields) {
3702 format("case $1$: {\n", index++);
3703 format.Indent();
3704
3705 GenerateSerializeOneField(printer, f, -1);
3706
3707 format("break;\n");
3708 format.Outdent();
3709 format("}\n");
3710 }
3711
3712 for (const auto* r : sorted_extensions) {
3713 format("case $1$: {\n", index++);
3714 format.Indent();
3715
3716 GenerateSerializeOneExtensionRange(printer, r);
3717
3718 format("break;\n");
3719 format.Outdent();
3720 format("}\n");
3721 }
3722
3723 format(
3724 "default: {\n"
3725 " $DCHK$(false) << \"Unexpected index: \" << i;\n"
3726 "}\n");
3727 format.Outdent();
3728 format("}\n");
3729
3730 format.Outdent();
3731 format("}\n");
3732
3733 std::map<std::string, std::string> vars;
3734 SetUnknownFieldsVariable(descriptor_, options_, &vars);
3735 format.AddMap(vars);
3736 format("if (PROTOBUF_PREDICT_FALSE($have_unknown_fields$)) {\n");
3737 format.Indent();
3738 if (UseUnknownFieldSet(descriptor_->file(), options_)) {
3739 format(
3740 "target = "
3741 "::_pbi::WireFormat::"
3742 "InternalSerializeUnknownFieldsToArray(\n"
3743 " $unknown_fields$, target, stream);\n");
3744 } else {
3745 format(
3746 "target = stream->WriteRaw($unknown_fields$.data(),\n"
3747 " static_cast<int>($unknown_fields$.size()), target);\n");
3748 }
3749 format.Outdent();
3750 format("}\n");
3751 }
3752
std::vector<uint32_t> MessageGenerator::RequiredFieldsBitMask() const {
3754 const int array_size = HasBitsSize();
3755 std::vector<uint32_t> masks(array_size, 0);
3756
3757 for (auto field : FieldRange(descriptor_)) {
3758 if (!field->is_required()) {
3759 continue;
3760 }
3761
3762 const int has_bit_index = has_bit_indices_[field->index()];
3763 masks[has_bit_index / 32] |= static_cast<uint32_t>(1)
3764 << (has_bit_index % 32);
3765 }
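  // Worked example (hypothetical message): required fields with has-bit
  // indices 0 and 33, in a message with two has-bit words, yield
  // masks == {0x00000001, 0x00000002}.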
3766 return masks;
3767 }
3768
void MessageGenerator::GenerateByteSize(io::Printer* printer) {
3770 if (HasSimpleBaseClass(descriptor_, options_)) return;
3771 Formatter format(printer, variables_);
3772
3773 if (descriptor_->options().message_set_wire_format()) {
3774 // Special-case MessageSet.
3775 std::map<std::string, std::string> vars;
3776 SetUnknownFieldsVariable(descriptor_, options_, &vars);
3777 format.AddMap(vars);
3778 format(
3779 "size_t $classname$::ByteSizeLong() const {\n"
3780 "$annotate_bytesize$"
3781 "// @@protoc_insertion_point(message_set_byte_size_start:$full_name$)\n"
3782 " size_t total_size = $extensions$.MessageSetByteSize();\n"
3783 " if ($have_unknown_fields$) {\n"
3784 " total_size += ::_pbi::\n"
3785 " ComputeUnknownMessageSetItemsSize($unknown_fields$);\n"
3786 " }\n"
3787 " int cached_size = "
3788 "::_pbi::ToCachedSize(total_size);\n"
3789 " SetCachedSize(cached_size);\n"
3790 " return total_size;\n"
3791 "}\n");
3792 return;
3793 }
3794
3795 if (num_required_fields_ > 1) {
3796 // Emit a function (rarely used, we hope) that handles the required fields
3797 // by checking for each one individually.
3798 format(
3799 "size_t $classname$::RequiredFieldsByteSizeFallback() const {\n"
3800 "// @@protoc_insertion_point(required_fields_byte_size_fallback_start:"
3801 "$full_name$)\n");
3802 format.Indent();
3803 format("size_t total_size = 0;\n");
3804 for (auto field : optimized_order_) {
3805 if (field->is_required()) {
3806 format(
3807 "\n"
3808 "if (_internal_has_$1$()) {\n",
3809 FieldName(field));
3810 format.Indent();
3811 PrintFieldComment(format, field);
3812 field_generators_.get(field).GenerateByteSize(printer);
3813 format.Outdent();
3814 format("}\n");
3815 }
3816 }
3817 format(
3818 "\n"
3819 "return total_size;\n");
3820 format.Outdent();
3821 format("}\n");
3822 }
3823
3824 format(
3825 "size_t $classname$::ByteSizeLong() const {\n"
3826 "$annotate_bytesize$"
3827 "// @@protoc_insertion_point(message_byte_size_start:$full_name$)\n");
3828 format.Indent();
3829 format(
3830 "size_t total_size = 0;\n"
3831 "\n");
3832
3833 if (descriptor_->extension_range_count() > 0) {
3834 format(
3835 "total_size += $extensions$.ByteSize();\n"
3836 "\n");
3837 }
3838
3839 std::map<std::string, std::string> vars;
3840 SetUnknownFieldsVariable(descriptor_, options_, &vars);
3841 format.AddMap(vars);
3842
3843 // Handle required fields (if any). We expect all of them to be
3844 // present, so emit one conditional that checks for that. If they are all
3845 // present then the fast path executes; otherwise the slow path executes.
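  // Illustrative sketch (hypothetical): with two required fields occupying
  // has-bit indices 0 and 1, the emitted fast path looks roughly like
  //
  //   if ((_has_bits_[0] & 0x00000003) == 0x00000003) {
  //     // All required fields are present.
  //     ... byte-size code for each required field ...
  //   } else {
  //     total_size += RequiredFieldsByteSizeFallback();
  //   }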
3846 if (num_required_fields_ > 1) {
3847 // The fast path works if all required fields are present.
3848 const std::vector<uint32_t> masks_for_has_bits = RequiredFieldsBitMask();
3849 format("if ($1$) { // All required fields are present.\n",
3850 ConditionalToCheckBitmasks(masks_for_has_bits));
3851 format.Indent();
3852 // Oneof fields cannot be required, so optimized_order_ contains all of the
3853 // fields that we need to potentially emit.
3854 for (auto field : optimized_order_) {
3855 if (!field->is_required()) continue;
3856 PrintFieldComment(format, field);
3857 field_generators_.get(field).GenerateByteSize(printer);
3858 format("\n");
3859 }
3860 format.Outdent();
3861 format(
3862 "} else {\n" // the slow path
3863 " total_size += RequiredFieldsByteSizeFallback();\n"
3864 "}\n");
3865 } else {
3866 // num_required_fields_ <= 1: no need to be tricky
3867 for (auto field : optimized_order_) {
3868 if (!field->is_required()) continue;
3869 PrintFieldComment(format, field);
3870 format("if (_internal_has_$1$()) {\n", FieldName(field));
3871 format.Indent();
3872 field_generators_.get(field).GenerateByteSize(printer);
3873 format.Outdent();
3874 format("}\n");
3875 }
3876 }
3877
3878 std::vector<std::vector<const FieldDescriptor*>> chunks = CollectFields(
3879 optimized_order_,
3880 [&](const FieldDescriptor* a, const FieldDescriptor* b) -> bool {
3881 return a->label() == b->label() && HasByteIndex(a) == HasByteIndex(b);
3882 });
3883
3884 // Remove chunks with required fields.
3885 chunks.erase(std::remove_if(chunks.begin(), chunks.end(), IsRequired),
3886 chunks.end());
3887
3888 ColdChunkSkipper cold_skipper(options_, chunks, has_bit_indices_, kColdRatio);
3889 int cached_has_word_index = -1;
3890
3891 format(
3892 "$uint32$ cached_has_bits = 0;\n"
3893 "// Prevent compiler warnings about cached_has_bits being unused\n"
3894 "(void) cached_has_bits;\n\n");
3895
3896 for (int chunk_index = 0; chunk_index < chunks.size(); chunk_index++) {
3897 const std::vector<const FieldDescriptor*>& chunk = chunks[chunk_index];
3898 const bool have_outer_if =
3899 chunk.size() > 1 && HasWordIndex(chunk[0]) != kNoHasbit;
3900 cold_skipper.OnStartChunk(chunk_index, cached_has_word_index, "", printer);
3901
3902 if (have_outer_if) {
3903 // Emit an if() that will let us skip the whole chunk if none are set.
3904 uint32_t chunk_mask = GenChunkMask(chunk, has_bit_indices_);
3905 std::string chunk_mask_str =
3906 StrCat(strings::Hex(chunk_mask, strings::ZERO_PAD_8));
3907
3908 // Check (up to) 8 has_bits at a time if we have more than one field in
3909 // this chunk. Due to field layout ordering, we may check
3910 // _has_bits_[last_chunk * 8 / 32] multiple times.
3911 GOOGLE_DCHECK_LE(2, popcnt(chunk_mask));
3912 GOOGLE_DCHECK_GE(8, popcnt(chunk_mask));
3913
3914 if (cached_has_word_index != HasWordIndex(chunk.front())) {
3915 cached_has_word_index = HasWordIndex(chunk.front());
3916 format("cached_has_bits = $has_bits$[$1$];\n", cached_has_word_index);
3917 }
3918 format("if (cached_has_bits & 0x$1$u) {\n", chunk_mask_str);
3919 format.Indent();
3920 }
3921
3922 // Go back and emit checks for each of the fields we processed.
3923 for (int j = 0; j < chunk.size(); j++) {
3924 const FieldDescriptor* field = chunk[j];
3925 const FieldGenerator& generator = field_generators_.get(field);
3926 bool have_enclosing_if = false;
3927 bool need_extra_newline = false;
3928
3929 PrintFieldComment(format, field);
3930
3931 if (field->is_repeated()) {
3932 // No presence check is required.
3933 need_extra_newline = true;
3934 } else if (HasHasbit(field)) {
3935 PrintPresenceCheck(format, field, has_bit_indices_, printer,
3936 &cached_has_word_index);
3937 have_enclosing_if = true;
3938 } else {
3939 // Without field presence: field is serialized only if it has a
3940 // non-default value.
3941 have_enclosing_if =
3942 EmitFieldNonDefaultCondition(printer, "this->", field);
3943 }
3944
3945 generator.GenerateByteSize(printer);
3946
3947 if (have_enclosing_if) {
3948 format.Outdent();
3949 format(
3950 "}\n"
3951 "\n");
3952 }
3953 if (need_extra_newline) {
3954 format("\n");
3955 }
3956 }
3957
3958 if (have_outer_if) {
3959 format.Outdent();
3960 format("}\n");
3961 }
3962
3963 if (cold_skipper.OnEndChunk(chunk_index, printer)) {
      // Reset here as it may have been updated in the just-closed if statement.
3965 cached_has_word_index = -1;
3966 }
3967 }
3968
3969 // Fields inside a oneof don't use _has_bits_ so we count them in a separate
3970 // pass.
3971 for (auto oneof : OneOfRange(descriptor_)) {
3972 format("switch ($1$_case()) {\n", oneof->name());
3973 format.Indent();
3974 for (auto field : FieldRange(oneof)) {
3975 PrintFieldComment(format, field);
3976 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
3977 format.Indent();
3978 if (!IsFieldStripped(field, options_)) {
3979 field_generators_.get(field).GenerateByteSize(printer);
3980 }
3981 format("break;\n");
3982 format.Outdent();
3983 format("}\n");
3984 }
3985 format(
3986 "case $1$_NOT_SET: {\n"
3987 " break;\n"
3988 "}\n",
3989 ToUpper(oneof->name()));
3990 format.Outdent();
3991 format("}\n");
3992 }
3993
3994 if (num_weak_fields_) {
3995 // TagSize + MessageSize
3996 format("total_size += $weak_field_map$.ByteSizeLong();\n");
3997 }
3998
3999 if (UseUnknownFieldSet(descriptor_->file(), options_)) {
4000 // We go out of our way to put the computation of the uncommon path of
4001 // unknown fields in tail position. This allows for better code generation
4002 // of this function for simple protos.
4003 format(
4004 "return MaybeComputeUnknownFieldsSize(total_size, &$cached_size$);\n");
4005 } else {
4006 format("if (PROTOBUF_PREDICT_FALSE($have_unknown_fields$)) {\n");
4007 format(" total_size += $unknown_fields$.size();\n");
4008 format("}\n");
4009
    // We update _cached_size_ even though this is a const method. Because
    // const methods might be called concurrently, this update must use
    // atomic operations, or the program has undefined behavior. In practice,
    // since any concurrent writes will be writing the exact same value,
    // normal writes will work on all common processors. We use a dedicated
    // wrapper class to abstract away the underlying atomic. This makes it
    // easier, on platforms where even relaxed memory order might have a perf
    // impact, to replace it with ordinary loads and stores.
4018 format(
4019 "int cached_size = ::_pbi::ToCachedSize(total_size);\n"
4020 "SetCachedSize(cached_size);\n"
4021 "return total_size;\n");
4022 }
4023
4024 format.Outdent();
4025 format("}\n");
4026 }
4027
void MessageGenerator::GenerateIsInitialized(io::Printer* printer) {
4029 if (HasSimpleBaseClass(descriptor_, options_)) return;
4030 Formatter format(printer, variables_);
4031 format("bool $classname$::IsInitialized() const {\n");
4032 format.Indent();
4033
4034 if (descriptor_->extension_range_count() > 0) {
4035 format(
4036 "if (!$extensions$.IsInitialized()) {\n"
4037 " return false;\n"
4038 "}\n\n");
4039 }
4040
4041 if (num_required_fields_ > 0) {
4042 format(
4043 "if (_Internal::MissingRequiredFields($has_bits$))"
4044 " return false;\n");
4045 }
4046
4047 // Now check that all non-oneof embedded messages are initialized.
4048 for (auto field : optimized_order_) {
4049 field_generators_.get(field).GenerateIsInitialized(printer);
4050 }
4051 if (num_weak_fields_) {
4052 // For Weak fields.
4053 format("if (!$weak_field_map$.IsInitialized()) return false;\n");
4054 }
4055 // Go through the oneof fields, emitting a switch if any might have required
4056 // fields.
4057 for (auto oneof : OneOfRange(descriptor_)) {
4058 bool has_required_fields = false;
4059 for (auto field : FieldRange(oneof)) {
4060 if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE &&
4061 !ShouldIgnoreRequiredFieldCheck(field, options_) &&
4062 scc_analyzer_->HasRequiredFields(field->message_type())) {
4063 has_required_fields = true;
4064 break;
4065 }
4066 }
4067
4068 if (!has_required_fields) {
4069 continue;
4070 }
4071
4072 format("switch ($1$_case()) {\n", oneof->name());
4073 format.Indent();
4074 for (auto field : FieldRange(oneof)) {
4075 format("case k$1$: {\n", UnderscoresToCamelCase(field->name(), true));
4076 format.Indent();
4077 if (!IsFieldStripped(field, options_)) {
4078 field_generators_.get(field).GenerateIsInitialized(printer);
4079 }
4080 format("break;\n");
4081 format.Outdent();
4082 format("}\n");
4083 }
4084 format(
4085 "case $1$_NOT_SET: {\n"
4086 " break;\n"
4087 "}\n",
4088 ToUpper(oneof->name()));
4089 format.Outdent();
4090 format("}\n");
4091 }
4092
4093 format.Outdent();
4094 format(
4095 " return true;\n"
4096 "}\n");
4097 }
4098
4099 } // namespace cpp
4100 } // namespace compiler
4101 } // namespace protobuf
4102 } // namespace google
4103
4104 #include <google/protobuf/port_undef.inc>
4105