• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2015 Google Inc. All rights reserved.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "flatbuffers/reflection.h"
18 
19 #include "flatbuffers/util.h"
20 
21 // Helper functionality for reflection.
22 
23 namespace flatbuffers {
24 
25 namespace {
26 
// Copies a fixed-size (inline-stored) field of `table` verbatim into `fbb`,
// aligning first, then recording the field so the table currently being
// built will include it. `align`/`size` are the struct's alignment and
// byte size as given by its schema definition.
static void CopyInline(FlatBufferBuilder &fbb,
                       const reflection::Field &fielddef, const Table &table,
                       size_t align, size_t size) {
  fbb.Align(align);
  fbb.PushBytes(table.GetStruct<const uint8_t *>(fielddef.offset()), size);
  // Associate the bytes just pushed with this field's vtable slot.
  fbb.TrackField(fielddef.offset(), fbb.GetSize());
}
34 
VerifyStruct(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)35 static bool VerifyStruct(flatbuffers::Verifier &v,
36                          const flatbuffers::Table &parent_table,
37                          voffset_t field_offset, const reflection::Object &obj,
38                          bool required) {
39   auto offset = parent_table.GetOptionalFieldOffset(field_offset);
40   if (required && !offset) { return false; }
41 
42   return !offset ||
43          v.VerifyFieldStruct(reinterpret_cast<const uint8_t *>(&parent_table),
44                              offset, obj.bytesize(), obj.minalign());
45 }
46 
VerifyVectorOfStructs(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)47 static bool VerifyVectorOfStructs(flatbuffers::Verifier &v,
48                                   const flatbuffers::Table &parent_table,
49                                   voffset_t field_offset,
50                                   const reflection::Object &obj,
51                                   bool required) {
52   auto p = parent_table.GetPointer<const uint8_t *>(field_offset);
53   if (required && !p) { return false; }
54 
55   return !p || v.VerifyVectorOrString(p, obj.bytesize());
56 }
57 
58 // forward declare to resolve cyclic deps between VerifyObject and VerifyVector
59 static bool VerifyObject(flatbuffers::Verifier &v,
60                          const reflection::Schema &schema,
61                          const reflection::Object &obj,
62                          const flatbuffers::Table *table, bool required);
63 
// Verifies a single union value `elem` whose runtime discriminator is
// `utype` (the value of the sibling _type field). A `utype` of 0 means
// "not present" and passes trivially.
static bool VerifyUnion(flatbuffers::Verifier &v,
                        const reflection::Schema &schema, uint8_t utype,
                        const uint8_t *elem,
                        const reflection::Field &union_field) {
  if (!utype) return true;  // Not present.
  auto fb_enum = schema.enums()->Get(union_field.type()->index());
  // NOTE(review): indexing values() by `utype` assumes union enum values are
  // dense 0..n-1 — true for flatc-generated unions; confirm if sparse union
  // values are ever possible.
  if (utype >= fb_enum->values()->size()) return false;
  auto elem_type = fb_enum->values()->Get(utype)->union_type();
  switch (elem_type->base_type()) {
    case reflection::Obj: {
      auto elem_obj = schema.objects()->Get(elem_type->index());
      if (elem_obj->is_struct()) {
        // Structs have no vtable; just check the inline byte range.
        return v.VerifyFromPointer(elem, elem_obj->bytesize());
      } else {
        return VerifyObject(v, schema, *elem_obj,
                            reinterpret_cast<const flatbuffers::Table *>(elem),
                            true);
      }
    }
    case reflection::String:
      return v.VerifyString(
          reinterpret_cast<const flatbuffers::String *>(elem));
    // Unions may only hold tables, structs or strings.
    default: return false;
  }
}
89 
VerifyVector(flatbuffers::Verifier & v,const reflection::Schema & schema,const flatbuffers::Table & table,const reflection::Field & vec_field)90 static bool VerifyVector(flatbuffers::Verifier &v,
91                          const reflection::Schema &schema,
92                          const flatbuffers::Table &table,
93                          const reflection::Field &vec_field) {
94   FLATBUFFERS_ASSERT(vec_field.type()->base_type() == reflection::Vector);
95   if (!table.VerifyField<uoffset_t>(v, vec_field.offset(), sizeof(uoffset_t)))
96     return false;
97 
98   switch (vec_field.type()->element()) {
99     case reflection::UType:
100       return v.VerifyVector(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
101     case reflection::Bool:
102     case reflection::Byte:
103     case reflection::UByte:
104       return v.VerifyVector(flatbuffers::GetFieldV<int8_t>(table, vec_field));
105     case reflection::Short:
106     case reflection::UShort:
107       return v.VerifyVector(flatbuffers::GetFieldV<int16_t>(table, vec_field));
108     case reflection::Int:
109     case reflection::UInt:
110       return v.VerifyVector(flatbuffers::GetFieldV<int32_t>(table, vec_field));
111     case reflection::Long:
112     case reflection::ULong:
113       return v.VerifyVector(flatbuffers::GetFieldV<int64_t>(table, vec_field));
114     case reflection::Float:
115       return v.VerifyVector(flatbuffers::GetFieldV<float>(table, vec_field));
116     case reflection::Double:
117       return v.VerifyVector(flatbuffers::GetFieldV<double>(table, vec_field));
118     case reflection::String: {
119       auto vec_string =
120           flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::String>>(
121               table, vec_field);
122       if (v.VerifyVector(vec_string) && v.VerifyVectorOfStrings(vec_string)) {
123         return true;
124       } else {
125         return false;
126       }
127     }
128     case reflection::Obj: {
129       auto obj = schema.objects()->Get(vec_field.type()->index());
130       if (obj->is_struct()) {
131         return VerifyVectorOfStructs(v, table, vec_field.offset(), *obj,
132                                      vec_field.required());
133       } else {
134         auto vec =
135             flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::Table>>(
136                 table, vec_field);
137         if (!v.VerifyVector(vec)) return false;
138         if (!vec) return true;
139         for (uoffset_t j = 0; j < vec->size(); j++) {
140           if (!VerifyObject(v, schema, *obj, vec->Get(j), true)) {
141             return false;
142           }
143         }
144         return true;
145       }
146     }
147     case reflection::Union: {
148       auto vec = flatbuffers::GetFieldV<flatbuffers::Offset<uint8_t>>(
149           table, vec_field);
150       if (!v.VerifyVector(vec)) return false;
151       if (!vec) return true;
152       auto type_vec = table.GetPointer<Vector<uint8_t> *>(vec_field.offset() -
153                                                           sizeof(voffset_t));
154       if (!v.VerifyVector(type_vec)) return false;
155       for (uoffset_t j = 0; j < vec->size(); j++) {
156         //  get union type from the prev field
157         auto utype = type_vec->Get(j);
158         auto elem = vec->Get(j);
159         if (!VerifyUnion(v, schema, utype, elem, vec_field)) return false;
160       }
161       return true;
162     }
163     case reflection::Vector:
164     case reflection::None:
165     default: FLATBUFFERS_ASSERT(false); return false;
166   }
167 }
168 
// Verifies one table (`table`) against its schema definition (`obj`),
// field by field, recursing into subobjects, vectors, strings and unions.
// A null `table` passes iff `required` is false.
static bool VerifyObject(flatbuffers::Verifier &v,
                         const reflection::Schema &schema,
                         const reflection::Object &obj,
                         const flatbuffers::Table *table, bool required) {
  if (!table) return !required;
  // Verifies the vtable before any field is read through it.
  if (!table->VerifyTableStart(v)) return false;
  for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
    auto field_def = obj.fields()->Get(i);
    switch (field_def->type()->base_type()) {
      case reflection::None: FLATBUFFERS_ASSERT(false); break;
      case reflection::UType:
        if (!table->VerifyField<uint8_t>(v, field_def->offset(),
                                         sizeof(uint8_t)))
          return false;
        break;
      // Scalars only need a bounds/alignment check of their slot; signedness
      // doesn't matter for that, so each width is checked via one type.
      case reflection::Bool:
      case reflection::Byte:
      case reflection::UByte:
        if (!table->VerifyField<int8_t>(v, field_def->offset(), sizeof(int8_t)))
          return false;
        break;
      case reflection::Short:
      case reflection::UShort:
        if (!table->VerifyField<int16_t>(v, field_def->offset(),
                                         sizeof(int16_t)))
          return false;
        break;
      case reflection::Int:
      case reflection::UInt:
        if (!table->VerifyField<int32_t>(v, field_def->offset(),
                                         sizeof(int32_t)))
          return false;
        break;
      case reflection::Long:
      case reflection::ULong:
        if (!table->VerifyField<int64_t>(v, field_def->offset(),
                                         sizeof(int64_t)))
          return false;
        break;
      case reflection::Float:
        if (!table->VerifyField<float>(v, field_def->offset(), sizeof(float)))
          return false;
        break;
      case reflection::Double:
        if (!table->VerifyField<double>(v, field_def->offset(), sizeof(double)))
          return false;
        break;
      case reflection::String:
        // Check the offset slot first, then the string it points to.
        if (!table->VerifyField<uoffset_t>(v, field_def->offset(),
                                           sizeof(uoffset_t)) ||
            !v.VerifyString(flatbuffers::GetFieldS(*table, *field_def))) {
          return false;
        }
        break;
      case reflection::Vector:
        if (!VerifyVector(v, schema, *table, *field_def)) return false;
        break;
      case reflection::Obj: {
        auto child_obj = schema.objects()->Get(field_def->type()->index());
        if (child_obj->is_struct()) {
          if (!VerifyStruct(v, *table, field_def->offset(), *child_obj,
                            field_def->required())) {
            return false;
          }
        } else {
          // Recurse into the sub-table.
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      case reflection::Union: {
        // Get union type from the prev field (the _type discriminator is one
        // voffset_t before the union value field).
        voffset_t utype_offset = field_def->offset() - sizeof(voffset_t);
        auto utype = table->GetField<uint8_t>(utype_offset, 0);
        auto uval = reinterpret_cast<const uint8_t *>(
            flatbuffers::GetFieldT(*table, *field_def));
        if (!VerifyUnion(v, schema, utype, uval, *field_def)) { return false; }
        break;
      }
      default: FLATBUFFERS_ASSERT(false); break;
    }
  }

  // Closes the verifier's bookkeeping for this table (depth/count limits).
  if (!v.EndTable()) return false;

  return true;
}
259 
260 }  // namespace
261 
GetAnyValueI(reflection::BaseType type,const uint8_t * data)262 int64_t GetAnyValueI(reflection::BaseType type, const uint8_t *data) {
263   // clang-format off
264   #define FLATBUFFERS_GET(T) static_cast<int64_t>(ReadScalar<T>(data))
265   switch (type) {
266     case reflection::UType:
267     case reflection::Bool:
268     case reflection::UByte:  return FLATBUFFERS_GET(uint8_t);
269     case reflection::Byte:   return FLATBUFFERS_GET(int8_t);
270     case reflection::Short:  return FLATBUFFERS_GET(int16_t);
271     case reflection::UShort: return FLATBUFFERS_GET(uint16_t);
272     case reflection::Int:    return FLATBUFFERS_GET(int32_t);
273     case reflection::UInt:   return FLATBUFFERS_GET(uint32_t);
274     case reflection::Long:   return FLATBUFFERS_GET(int64_t);
275     case reflection::ULong:  return FLATBUFFERS_GET(uint64_t);
276     case reflection::Float:  return FLATBUFFERS_GET(float);
277     case reflection::Double: return FLATBUFFERS_GET(double);
278     case reflection::String: {
279       auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
280                                                 data);
281       return s ? StringToInt(s->c_str()) : 0;
282     }
283     default: return 0;  // Tables & vectors do not make sense.
284   }
285   #undef FLATBUFFERS_GET
286   // clang-format on
287 }
288 
GetAnyValueF(reflection::BaseType type,const uint8_t * data)289 double GetAnyValueF(reflection::BaseType type, const uint8_t *data) {
290   switch (type) {
291     case reflection::Float: return static_cast<double>(ReadScalar<float>(data));
292     case reflection::Double: return ReadScalar<double>(data);
293     case reflection::String: {
294       auto s =
295           reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
296       if (s) {
297         double d;
298         StringToNumber(s->c_str(), &d);
299         return d;
300       } else {
301         return 0.0;
302       }
303     }
304     default: return static_cast<double>(GetAnyValueI(type, data));
305   }
306 }
307 
// Renders any value as a string. Scalars go through GetAnyValueI/F; tables
// are pretty-printed (roughly JSON-like, NOT compliant) when a schema is
// supplied, with the type name prefixed. `type_index` selects the object
// definition within the schema for Obj-typed values.
std::string GetAnyValueS(reflection::BaseType type, const uint8_t *data,
                         const reflection::Schema *schema, int type_index) {
  switch (type) {
    case reflection::Float:
    case reflection::Double: return NumToString(GetAnyValueF(type, data));
    case reflection::String: {
      // `data` holds a uoffset_t to the string, relative to its own address.
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      return s ? s->c_str() : "";
    }
    case reflection::Obj:
      if (schema) {
        // Convert the table to a string. This is mostly for debugging purposes,
        // and does NOT promise to be JSON compliant.
        // Also prefixes the type.
        auto &objectdef = *schema->objects()->Get(type_index);
        auto s = objectdef.name()->str();
        if (objectdef.is_struct()) {
          s += "(struct)";  // TODO: implement this as well.
        } else {
          auto table_field = reinterpret_cast<const Table *>(
              ReadScalar<uoffset_t>(data) + data);
          s += " { ";
          auto fielddefs = objectdef.fields();
          for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
            auto &fielddef = **it;
            // Skip fields absent from this particular table.
            if (!table_field->CheckField(fielddef.offset())) continue;
            auto val = GetAnyFieldS(*table_field, fielddef, schema);
            if (fielddef.type()->base_type() == reflection::String) {
              // Quote/escape string values so the output stays readable.
              std::string esc;
              flatbuffers::EscapeString(val.c_str(), val.length(), &esc, true,
                                        false);
              val = esc;
            }
            s += fielddef.name()->str();
            s += ": ";
            s += val;
            s += ", ";
          }
          s += "}";
        }
        return s;
      } else {
        return "(table)";
      }
    case reflection::Vector:
      return "[(elements)]";                   // TODO: implement this as well.
    case reflection::Union: return "(union)";  // TODO: implement this as well.
    default: return NumToString(GetAnyValueI(type, data));
  }
}
359 
ForAllFields(const reflection::Object * object,bool reverse,std::function<void (const reflection::Field *)> func)360 void ForAllFields(const reflection::Object *object, bool reverse,
361                   std::function<void(const reflection::Field *)> func) {
362   std::vector<uint32_t> field_to_id_map;
363   field_to_id_map.resize(object->fields()->size());
364 
365   // Create the mapping of field ID to the index into the vector.
366   for (uint32_t i = 0; i < object->fields()->size(); ++i) {
367     auto field = object->fields()->Get(i);
368     field_to_id_map[field->id()] = i;
369   }
370 
371   for (size_t i = 0; i < field_to_id_map.size(); ++i) {
372     func(object->fields()->Get(
373         field_to_id_map[reverse ? field_to_id_map.size() - i + 1 : i]));
374   }
375 }
376 
SetAnyValueI(reflection::BaseType type,uint8_t * data,int64_t val)377 void SetAnyValueI(reflection::BaseType type, uint8_t *data, int64_t val) {
378   // clang-format off
379   #define FLATBUFFERS_SET(T) WriteScalar(data, static_cast<T>(val))
380   switch (type) {
381     case reflection::UType:
382     case reflection::Bool:
383     case reflection::UByte:  FLATBUFFERS_SET(uint8_t ); break;
384     case reflection::Byte:   FLATBUFFERS_SET(int8_t  ); break;
385     case reflection::Short:  FLATBUFFERS_SET(int16_t ); break;
386     case reflection::UShort: FLATBUFFERS_SET(uint16_t); break;
387     case reflection::Int:    FLATBUFFERS_SET(int32_t ); break;
388     case reflection::UInt:   FLATBUFFERS_SET(uint32_t); break;
389     case reflection::Long:   FLATBUFFERS_SET(int64_t ); break;
390     case reflection::ULong:  FLATBUFFERS_SET(uint64_t); break;
391     case reflection::Float:  FLATBUFFERS_SET(float   ); break;
392     case reflection::Double: FLATBUFFERS_SET(double  ); break;
393     // TODO: support strings
394     default: break;
395   }
396   #undef FLATBUFFERS_SET
397   // clang-format on
398 }
399 
SetAnyValueF(reflection::BaseType type,uint8_t * data,double val)400 void SetAnyValueF(reflection::BaseType type, uint8_t *data, double val) {
401   switch (type) {
402     case reflection::Float: WriteScalar(data, static_cast<float>(val)); break;
403     case reflection::Double: WriteScalar(data, val); break;
404     // TODO: support strings.
405     default: SetAnyValueI(type, data, static_cast<int64_t>(val)); break;
406   }
407 }
408 
SetAnyValueS(reflection::BaseType type,uint8_t * data,const char * val)409 void SetAnyValueS(reflection::BaseType type, uint8_t *data, const char *val) {
410   switch (type) {
411     case reflection::Float:
412     case reflection::Double: {
413       double d;
414       StringToNumber(val, &d);
415       SetAnyValueF(type, data, d);
416       break;
417     }
418     // TODO: support strings.
419     default: SetAnyValueI(type, data, StringToInt(val)); break;
420   }
421 }
422 
// Resize a FlatBuffer in-place by iterating through all offsets in the buffer
// and adjusting them by "delta" if they straddle the start offset.
// Once that is done, bytes can now be inserted/deleted safely.
// "delta" may be negative (shrinking).
// Unless "delta" is a multiple of the largest alignment, you'll create a small
// amount of garbage space in the buffer (usually 0..7 bytes).
// If your FlatBuffer's root table is not the schema's root table, you should
// pass in your root_table type as well.
class ResizeContext {
 public:
  // All work happens in the constructor: offsets are patched first, then the
  // bytes are inserted/erased at `start`.
  ResizeContext(const reflection::Schema &schema, uoffset_t start, int delta,
                std::vector<uint8_t> *flatbuf,
                const reflection::Object *root_table = nullptr)
      : schema_(schema),
        startptr_(flatbuf->data() + start),
        delta_(delta),
        buf_(*flatbuf),
        dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
    // Round delta up to a multiple of the largest scalar size so alignment
    // of everything after the insertion point is preserved.
    auto mask = static_cast<int>(sizeof(largest_scalar_t) - 1);
    delta_ = (delta_ + mask) & ~mask;
    if (!delta_) return;  // We can't shrink by less than largest_scalar_t.
    // Now change all the offsets by delta_.
    auto root = GetAnyRoot(buf_.data());
    Straddle<uoffset_t, 1>(buf_.data(), root, buf_.data());
    ResizeTable(root_table ? *root_table : *schema.root_table(), root);
    // We can now add or remove bytes at start.
    if (delta_ > 0)
      buf_.insert(buf_.begin() + start, delta_, 0);
    else
      buf_.erase(buf_.begin() + start + delta_, buf_.begin() + start);
  }

  // Check if the range between first (lower address) and second straddles
  // the insertion point. If it does, change the offset at offsetloc (of
  // type T, with direction D).
  template<typename T, int D>
  void Straddle(const void *first, const void *second, void *offsetloc) {
    if (first <= startptr_ && second >= startptr_) {
      WriteScalar<T>(offsetloc, ReadScalar<T>(offsetloc) + delta_ * D);
      DagCheck(offsetloc) = true;
    }
  }

  // This returns a boolean that records if the corresponding offset location
  // has been modified already. If so, we can't even read the corresponding
  // offset, since it is pointing to a location that is illegal until the
  // resize actually happens.
  // This must be checked for every offset, since we can't know which offsets
  // will straddle and which won't.
  uint8_t &DagCheck(const void *offsetloc) {
    // One flag per uoffset_t-sized slot in the buffer.
    auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
                   reinterpret_cast<const uoffset_t *>(buf_.data());
    return dag_check_[dag_idx];
  }

  // Recursively patches every offset reachable from `table` (typed by
  // `objectdef`) that straddles the insertion point.
  void ResizeTable(const reflection::Object &objectdef, Table *table) {
    if (DagCheck(table)) return;  // Table already visited.
    auto vtable = table->GetVTable();
    // Early out: since all fields inside the table must point forwards in
    // memory, if the insertion point is before the table we can stop here.
    auto tableloc = reinterpret_cast<uint8_t *>(table);
    if (startptr_ <= tableloc) {
      // Check if insertion point is between the table and a vtable that
      // precedes it. This can't happen in current construction code, but check
      // just in case we ever change the way flatbuffers are built.
      Straddle<soffset_t, -1>(vtable, table, table);
    } else {
      // Check each field.
      auto fielddefs = objectdef.fields();
      for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
        auto &fielddef = **it;
        auto base_type = fielddef.type()->base_type();
        // Ignore scalars.
        if (base_type <= reflection::Double) continue;
        // Ignore fields that are not stored.
        auto offset = table->GetOptionalFieldOffset(fielddef.offset());
        if (!offset) continue;
        // Ignore structs.
        auto subobjectdef =
            base_type == reflection::Obj
                ? schema_.objects()->Get(fielddef.type()->index())
                : nullptr;
        if (subobjectdef && subobjectdef->is_struct()) continue;
        // Get this fields' offset, and read it if safe.
        auto offsetloc = tableloc + offset;
        if (DagCheck(offsetloc)) continue;  // This offset already visited.
        auto ref = offsetloc + ReadScalar<uoffset_t>(offsetloc);
        Straddle<uoffset_t, 1>(offsetloc, ref, offsetloc);
        // Recurse.
        switch (base_type) {
          case reflection::Obj: {
            if (subobjectdef) {
              ResizeTable(*subobjectdef, reinterpret_cast<Table *>(ref));
            }
            break;
          }
          case reflection::Vector: {
            auto elem_type = fielddef.type()->element();
            // Only vectors of strings/tables contain further offsets.
            if (elem_type != reflection::Obj && elem_type != reflection::String)
              break;
            auto vec = reinterpret_cast<Vector<uoffset_t> *>(ref);
            auto elemobjectdef =
                elem_type == reflection::Obj
                    ? schema_.objects()->Get(fielddef.type()->index())
                    : nullptr;
            if (elemobjectdef && elemobjectdef->is_struct()) break;
            for (uoffset_t i = 0; i < vec->size(); i++) {
              auto loc = vec->Data() + i * sizeof(uoffset_t);
              if (DagCheck(loc)) continue;  // This offset already visited.
              auto dest = loc + vec->Get(i);
              Straddle<uoffset_t, 1>(loc, dest, loc);
              if (elemobjectdef)
                ResizeTable(*elemobjectdef, reinterpret_cast<Table *>(dest));
            }
            break;
          }
          case reflection::Union: {
            ResizeTable(GetUnionType(schema_, objectdef, fielddef, *table),
                        reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::String: break;
          default: FLATBUFFERS_ASSERT(false);
        }
      }
      // Check if the vtable offset points beyond the insertion point.
      // Must do this last, since GetOptionalFieldOffset above still reads
      // this value.
      Straddle<soffset_t, -1>(table, vtable, table);
    }
  }

 private:
  const reflection::Schema &schema_;
  uint8_t *startptr_;           // Address of the insertion point within buf_.
  int delta_;                   // Rounded-up byte delta being applied.
  std::vector<uint8_t> &buf_;   // The buffer being resized, by reference.
  std::vector<uint8_t> dag_check_;  // Per-slot "offset already patched" flags.
};
562 
// Replaces the contents of `str` (which lives inside `flatbuf`) with `val`,
// resizing the whole buffer in-place when the lengths differ.
// Pass `root_table` if the buffer's root is not the schema's root table.
void SetString(const reflection::Schema &schema, const std::string &val,
               const String *str, std::vector<uint8_t> *flatbuf,
               const reflection::Object *root_table) {
  auto delta = static_cast<int>(val.size()) - static_cast<int>(str->size());
  // Compute offsets before any resize, since `str` points into `flatbuf`
  // and the resize below reallocates/shifts the storage.
  auto str_start = static_cast<uoffset_t>(
      reinterpret_cast<const uint8_t *>(str) - flatbuf->data());
  auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
  if (delta) {
    // Clear the old string, since we don't want parts of it remaining.
    memset(flatbuf->data() + start, 0, str->size());
    // Different size, we must expand (or contract).
    ResizeContext ctx(schema, start, delta, flatbuf, root_table);
    // Set the new length.
    WriteScalar(flatbuf->data() + str_start,
                static_cast<uoffset_t>(val.size()));
  }
  // Copy new data. Safe because we created the right amount of space.
  memcpy(flatbuf->data() + start, val.c_str(), val.size() + 1);
}
582 
// Resizes the vector `vec` (which lives inside `flatbuf`) from `num_elems`
// to `newsize` elements of `elem_size` bytes each, resizing the whole buffer
// in-place. New elements are zeroed; returns a pointer to the first byte
// just past the original elements (i.e. where new elements begin).
uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
                         const VectorOfAny *vec, uoffset_t num_elems,
                         uoffset_t elem_size, std::vector<uint8_t> *flatbuf,
                         const reflection::Object *root_table) {
  auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
  auto delta_bytes = delta_elem * static_cast<int>(elem_size);
  // Compute positions before the resize invalidates `vec`.
  auto vec_start = reinterpret_cast<const uint8_t *>(vec) - flatbuf->data();
  // `start` is the end of the existing elements (past the length field).
  auto start = static_cast<uoffset_t>(vec_start) +
               static_cast<uoffset_t>(sizeof(uoffset_t)) +
               elem_size * num_elems;
  if (delta_bytes) {
    if (delta_elem < 0) {
      // Clear elements we're throwing away, since some might remain in the
      // buffer.
      auto size_clear = -delta_elem * elem_size;
      memset(flatbuf->data() + start - size_clear, 0, size_clear);
    }
    ResizeContext ctx(schema, start, delta_bytes, flatbuf, root_table);
    WriteScalar(flatbuf->data() + vec_start, newsize);  // Length field.
    // Set new elements to 0.. this can be overwritten by the caller.
    if (delta_elem > 0) {
      memset(flatbuf->data() + start, 0,
             static_cast<size_t>(delta_elem) * elem_size);
    }
  }
  return flatbuf->data() + start;
}
610 
// Appends a complete FlatBuffer (`newbuf`, `newlen` bytes) to `flatbuf`,
// stripped of its root offset, and returns a pointer to the root table of
// the copy inside `flatbuf`.
const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
                             const uint8_t *newbuf, size_t newlen) {
  // Align to sizeof(uoffset_t) past sizeof(largest_scalar_t) since we're
  // going to chop off the root offset.
  while ((flatbuf.size() & (sizeof(uoffset_t) - 1)) ||
         !(flatbuf.size() & (sizeof(largest_scalar_t) - 1))) {
    flatbuf.push_back(0);
  }
  auto insertion_point = static_cast<uoffset_t>(flatbuf.size());
  // Insert the entire FlatBuffer minus the root pointer.
  flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
  // Root offset was relative to newbuf's start; adjust for the stripped
  // uoffset_t and rebase onto the insertion point.
  auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
  return flatbuf.data() + insertion_point + root_offset;
}
625 
// Deep-copies `table` (whose layout is described by `objectdef` inside
// `schema`) into `fbb`, returning the offset of the copy.
//
// The copy is done in two passes over the field definitions:
//   1. Serialize every sub-object reachable from the table (strings, child
//      tables, unions, vectors) into `fbb` and remember the resulting
//      offsets, since a FlatBuffer table can only refer to data written
//      before it.
//   2. Build the table (or struct) itself, consuming the collected offsets
//      in the same field order and copying scalar/struct data inline.
//
// `use_string_pooling` makes pass 1 deduplicate identical strings via
// CreateSharedString instead of CreateString.
Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
                                const reflection::Schema &schema,
                                const reflection::Object &objectdef,
                                const Table &table, bool use_string_pooling) {
  // Before we can construct the table, we have to first generate any
  // subobjects, and collect their offsets.
  std::vector<uoffset_t> offsets;
  auto fielddefs = objectdef.fields();
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    // Skip if field is not present in the source.
    if (!table.CheckField(fielddef.offset())) continue;
    // 0 means "no sub-object was emitted for this field" (scalars, structs);
    // only non-zero offsets get recorded for pass 2.
    uoffset_t offset = 0;
    switch (fielddef.type()->base_type()) {
      case reflection::String: {
        offset = use_string_pooling
                     ? fbb.CreateSharedString(GetFieldS(table, fielddef)).o
                     : fbb.CreateString(GetFieldS(table, fielddef)).o;
        break;
      }
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        // Structs are stored inline in the parent, so they are copied in
        // pass 2 (CopyInline); only child tables need a recursive copy here.
        if (!subobjectdef.is_struct()) {
          offset = CopyTable(fbb, schema, subobjectdef,
                             *GetFieldT(table, fielddef), use_string_pooling)
                       .o;
        }
        break;
      }
      case reflection::Union: {
        // Resolve the concrete table type from the union's type field,
        // then copy it like a regular child table.
        auto &subobjectdef = GetUnionType(schema, objectdef, fielddef, table);
        offset = CopyTable(fbb, schema, subobjectdef,
                           *GetFieldT(table, fielddef), use_string_pooling)
                     .o;
        break;
      }
      case reflection::Vector: {
        auto vec =
            table.GetPointer<const Vector<Offset<Table>> *>(fielddef.offset());
        auto element_base_type = fielddef.type()->element();
        // Element object definition, only meaningful for vectors of Obj.
        auto elemobjectdef =
            element_base_type == reflection::Obj
                ? schema.objects()->Get(fielddef.type()->index())
                : nullptr;
        switch (element_base_type) {
          case reflection::String: {
            // Copy each string element, then build the vector of offsets.
            std::vector<Offset<const String *>> elements(vec->size());
            auto vec_s = reinterpret_cast<const Vector<Offset<String>> *>(vec);
            for (uoffset_t i = 0; i < vec_s->size(); i++) {
              elements[i] = use_string_pooling
                                ? fbb.CreateSharedString(vec_s->Get(i)).o
                                : fbb.CreateString(vec_s->Get(i)).o;
            }
            offset = fbb.CreateVector(elements).o;
            break;
          }
          case reflection::Obj: {
            if (!elemobjectdef->is_struct()) {
              // Vector of tables: copy each table, then the offset vector.
              std::vector<Offset<const Table *>> elements(vec->size());
              for (uoffset_t i = 0; i < vec->size(); i++) {
                elements[i] = CopyTable(fbb, schema, *elemobjectdef,
                                        *vec->Get(i), use_string_pooling);
              }
              offset = fbb.CreateVector(elements).o;
              break;
            }
          }
            // Deliberate: vectors of structs fall through and are copied as
            // raw bytes together with scalar vectors below.
            FLATBUFFERS_FALLTHROUGH();  // fall thru
          default: {                    // Scalars and structs.
            auto element_size = GetTypeSize(element_base_type);
            auto element_alignment = element_size;  // For primitive elements
            if (elemobjectdef && elemobjectdef->is_struct())
              element_size = elemobjectdef->bytesize();
            // Elements are inline data: a single bulk byte copy suffices.
            fbb.StartVector(vec->size(), element_size, element_alignment);
            fbb.PushBytes(vec->Data(), element_size * vec->size());
            offset = fbb.EndVector(vec->size());
            break;
          }
        }
        break;
      }
      default:  // Scalars.
        break;
    }
    if (offset) { offsets.push_back(offset); }
  }
  // Now we can build the actual table from either offsets or scalar data.
  auto start = objectdef.is_struct() ? fbb.StartStruct(objectdef.minalign())
                                     : fbb.StartTable();
  // Offsets recorded in pass 1 are consumed here in the same field order.
  size_t offset_idx = 0;
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    if (!table.CheckField(fielddef.offset())) continue;
    auto base_type = fielddef.type()->base_type();
    switch (base_type) {
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        if (subobjectdef.is_struct()) {
          // Structs are stored inline; copy their raw bytes.
          CopyInline(fbb, fielddef, table, subobjectdef.minalign(),
                     subobjectdef.bytesize());
          break;
        }
      }
        // Non-struct Obj fields were serialized in pass 1: fall through to
        // the offset-adding cases below.
        FLATBUFFERS_FALLTHROUGH();  // fall thru
      case reflection::Union:
      case reflection::String:
      case reflection::Vector:
        fbb.AddOffset(fielddef.offset(), Offset<void>(offsets[offset_idx++]));
        break;
      default: {  // Scalars.
        auto size = GetTypeSize(base_type);
        CopyInline(fbb, fielddef, table, size, size);
        break;
      }
    }
  }
  // Both passes must have agreed on which fields produced sub-objects.
  FLATBUFFERS_ASSERT(offset_idx == offsets.size());
  if (objectdef.is_struct()) {
    fbb.ClearOffsets();
    return fbb.EndStruct();
  } else {
    return fbb.EndTable(start);
  }
}
750 
Verify(const reflection::Schema & schema,const reflection::Object & root,const uint8_t * const buf,const size_t length,const uoffset_t max_depth,const uoffset_t max_tables)751 bool Verify(const reflection::Schema &schema, const reflection::Object &root,
752             const uint8_t *const buf, const size_t length,
753             const uoffset_t max_depth, const uoffset_t max_tables) {
754   Verifier v(buf, length, max_depth, max_tables);
755   return VerifyObject(v, schema, root, flatbuffers::GetAnyRoot(buf),
756                       /*required=*/true);
757 }
758 
VerifySizePrefixed(const reflection::Schema & schema,const reflection::Object & root,const uint8_t * const buf,const size_t length,const uoffset_t max_depth,const uoffset_t max_tables)759 bool VerifySizePrefixed(const reflection::Schema &schema,
760                         const reflection::Object &root,
761                         const uint8_t *const buf, const size_t length,
762                         const uoffset_t max_depth, const uoffset_t max_tables) {
763   Verifier v(buf, length, max_depth, max_tables);
764   return VerifyObject(v, schema, root, flatbuffers::GetAnySizePrefixedRoot(buf),
765                       /*required=*/true);
766 }
767 
768 }  // namespace flatbuffers
769