1 /*
2 * Copyright 2015 Google Inc. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "flatbuffers/reflection.h"
18 #include "flatbuffers/util.h"
19
20 // Helper functionality for reflection.
21
22 namespace flatbuffers {
23
GetAnyValueI(reflection::BaseType type,const uint8_t * data)24 int64_t GetAnyValueI(reflection::BaseType type, const uint8_t *data) {
25 # define FLATBUFFERS_GET(T) static_cast<int64_t>(ReadScalar<T>(data))
26 switch (type) {
27 case reflection::UType:
28 case reflection::Bool:
29 case reflection::UByte: return FLATBUFFERS_GET(uint8_t);
30 case reflection::Byte: return FLATBUFFERS_GET(int8_t);
31 case reflection::Short: return FLATBUFFERS_GET(int16_t);
32 case reflection::UShort: return FLATBUFFERS_GET(uint16_t);
33 case reflection::Int: return FLATBUFFERS_GET(int32_t);
34 case reflection::UInt: return FLATBUFFERS_GET(uint32_t);
35 case reflection::Long: return FLATBUFFERS_GET(int64_t);
36 case reflection::ULong: return FLATBUFFERS_GET(uint64_t);
37 case reflection::Float: return FLATBUFFERS_GET(float);
38 case reflection::Double: return FLATBUFFERS_GET(double);
39 case reflection::String: {
40 auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
41 data);
42 return s ? StringToInt(s->c_str()) : 0;
43 }
44 default: return 0; // Tables & vectors do not make sense.
45 }
46 # undef FLATBUFFERS_GET
47 }
48
GetAnyValueF(reflection::BaseType type,const uint8_t * data)49 double GetAnyValueF(reflection::BaseType type, const uint8_t *data) {
50 switch (type) {
51 case reflection::Float: return static_cast<double>(ReadScalar<float>(data));
52 case reflection::Double: return ReadScalar<double>(data);
53 case reflection::String: {
54 auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
55 data);
56 return s ? strtod(s->c_str(), nullptr) : 0.0;
57 }
58 default: return static_cast<double>(GetAnyValueI(type, data));
59 }
60 }
61
// Converts the value at `data`, interpreted as `type`, into a std::string.
// For tables (reflection::Obj), if `schema` is non-null, produces a
// human-readable dump (type name + field list) that does NOT promise to be
// JSON compliant; `type_index` indexes schema->objects() to find the
// object definition. Vectors and unions currently render as placeholders.
std::string GetAnyValueS(reflection::BaseType type, const uint8_t *data,
                         const reflection::Schema *schema, int type_index) {
  switch (type) {
    case reflection::Float:
    case reflection::Double: return NumToString(GetAnyValueF(type, data));
    case reflection::String: {
      // `data` holds a uoffset_t pointing forward to the string object.
      auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
                                                data);
      return s ? s->c_str() : "";
    }
    case reflection::Obj:
      if (schema) {
        // Convert the table to a string. This is mostly for debugging purposes,
        // and does NOT promise to be JSON compliant.
        // Also prefixes the type.
        auto &objectdef = *schema->objects()->Get(type_index);
        auto s = objectdef.name()->str();
        if (objectdef.is_struct()) {
          s += "(struct)";  // TODO: implement this as well.
        } else {
          // Follow the offset to the sub-table and dump each present field.
          auto table_field = reinterpret_cast<const Table *>(
              ReadScalar<uoffset_t>(data) + data);
          s += " { ";
          auto fielddefs = objectdef.fields();
          for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
            auto &fielddef = **it;
            // Skip fields that are absent in this table instance.
            if (!table_field->CheckField(fielddef.offset())) continue;
            auto val = GetAnyFieldS(*table_field, fielddef, schema);
            if (fielddef.type()->base_type() == reflection::String) {
              // Quote/escape string values so the dump stays readable.
              std::string esc;
              flatbuffers::EscapeString(val.c_str(), val.length(), &esc, true);
              val = esc;
            }
            s += fielddef.name()->str();
            s += ": ";
            s += val;
            s += ", ";
          }
          s += "}";
        }
        return s;
      } else {
        return "(table)";
      }
    case reflection::Vector:
      return "[(elements)]";  // TODO: implement this as well.
    case reflection::Union:
      return "(union)";  // TODO: implement this as well.
    default: return NumToString(GetAnyValueI(type, data));
  }
}
113
SetAnyValueI(reflection::BaseType type,uint8_t * data,int64_t val)114 void SetAnyValueI(reflection::BaseType type, uint8_t *data, int64_t val) {
115 # define FLATBUFFERS_SET(T) WriteScalar(data, static_cast<T>(val))
116 switch (type) {
117 case reflection::UType:
118 case reflection::Bool:
119 case reflection::UByte: FLATBUFFERS_SET(uint8_t ); break;
120 case reflection::Byte: FLATBUFFERS_SET(int8_t ); break;
121 case reflection::Short: FLATBUFFERS_SET(int16_t ); break;
122 case reflection::UShort: FLATBUFFERS_SET(uint16_t); break;
123 case reflection::Int: FLATBUFFERS_SET(int32_t ); break;
124 case reflection::UInt: FLATBUFFERS_SET(uint32_t); break;
125 case reflection::Long: FLATBUFFERS_SET(int64_t ); break;
126 case reflection::ULong: FLATBUFFERS_SET(uint64_t); break;
127 case reflection::Float: FLATBUFFERS_SET(float ); break;
128 case reflection::Double: FLATBUFFERS_SET(double ); break;
129 // TODO: support strings
130 default: break;
131 }
132 # undef FLATBUFFERS_SET
133 }
134
SetAnyValueF(reflection::BaseType type,uint8_t * data,double val)135 void SetAnyValueF(reflection::BaseType type, uint8_t *data, double val) {
136 switch (type) {
137 case reflection::Float: WriteScalar(data, static_cast<float>(val)); break;
138 case reflection::Double: WriteScalar(data, val); break;
139 // TODO: support strings.
140 default: SetAnyValueI(type, data, static_cast<int64_t>(val)); break;
141 }
142 }
143
SetAnyValueS(reflection::BaseType type,uint8_t * data,const char * val)144 void SetAnyValueS(reflection::BaseType type, uint8_t *data, const char *val) {
145 switch (type) {
146 case reflection::Float:
147 case reflection::Double:
148 SetAnyValueF(type, data, strtod(val, nullptr));
149 break;
150 // TODO: support strings.
151 default: SetAnyValueI(type, data, StringToInt(val)); break;
152 }
153 }
154
155 // Resize a FlatBuffer in-place by iterating through all offsets in the buffer
156 // and adjusting them by "delta" if they straddle the start offset.
157 // Once that is done, bytes can now be inserted/deleted safely.
158 // "delta" may be negative (shrinking).
159 // Unless "delta" is a multiple of the largest alignment, you'll create a small
160 // amount of garbage space in the buffer (usually 0..7 bytes).
161 // If your FlatBuffer's root table is not the schema's root table, you should
162 // pass in your root_table type as well.
class ResizeContext {
 public:
  // The constructor does all the work: it rounds `delta` up to
  // largest_scalar_t alignment, patches every offset in the buffer that
  // straddles `start`, and finally inserts or erases the bytes at `start`.
  ResizeContext(const reflection::Schema &schema, uoffset_t start, int delta,
                std::vector<uint8_t> *flatbuf,
                const reflection::Object *root_table = nullptr)
      : schema_(schema), startptr_(vector_data(*flatbuf) + start),
        delta_(delta), buf_(*flatbuf),
        dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
    auto mask = static_cast<int>(sizeof(largest_scalar_t) - 1);
    // Round delta_ up to a multiple of largest_scalar_t so alignment of
    // everything after the insertion point is preserved.
    delta_ = (delta_ + mask) & ~mask;
    if (!delta_) return;  // We can't shrink by less than largest_scalar_t.
    // Now change all the offsets by delta_.
    auto root = GetAnyRoot(vector_data(buf_));
    // The root uoffset_t at the start of the buffer may itself straddle.
    Straddle<uoffset_t, 1>(vector_data(buf_), root, vector_data(buf_));
    ResizeTable(root_table ? *root_table : *schema.root_table(), root);
    // We can now add or remove bytes at start.
    if (delta_ > 0) buf_.insert(buf_.begin() + start, delta_, 0);
    else buf_.erase(buf_.begin() + start, buf_.begin() + start - delta_);
  }

  // Check if the range between first (lower address) and second straddles
  // the insertion point. If it does, change the offset at offsetloc (of
  // type T, with direction D), and mark it as modified in dag_check_.
  template<typename T, int D> void Straddle(const void *first,
                                            const void *second,
                                            void *offsetloc) {
    if (first <= startptr_ && second >= startptr_) {
      WriteScalar<T>(offsetloc, ReadScalar<T>(offsetloc) + delta_ * D);
      DagCheck(offsetloc) = true;
    }
  }

  // This returns a boolean that records if the corresponding offset location
  // has been modified already. If so, we can't even read the corresponding
  // offset, since it is pointing to a location that is illegal until the
  // resize actually happens.
  // This must be checked for every offset, since we can't know which offsets
  // will straddle and which won't.
  uint8_t &DagCheck(const void *offsetloc) {
    // Index by the offset location's uoffset_t-slot within the buffer.
    auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
                   reinterpret_cast<const uoffset_t *>(vector_data(buf_));
    return dag_check_[dag_idx];
  }

  // Recursively walk `table` (described by `objectdef`), adjusting every
  // offset (vtable, field, vector element, sub-table) that straddles the
  // insertion point.
  void ResizeTable(const reflection::Object &objectdef, Table *table) {
    if (DagCheck(table))
      return;  // Table already visited.
    auto vtable = table->GetVTable();
    // Early out: since all fields inside the table must point forwards in
    // memory, if the insertion point is before the table we can stop here.
    auto tableloc = reinterpret_cast<uint8_t *>(table);
    if (startptr_ <= tableloc) {
      // Check if insertion point is between the table and a vtable that
      // precedes it. This can't happen in current construction code, but check
      // just in case we ever change the way flatbuffers are built.
      Straddle<soffset_t, -1>(vtable, table, table);
    } else {
      // Check each field.
      auto fielddefs = objectdef.fields();
      for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
        auto &fielddef = **it;
        auto base_type = fielddef.type()->base_type();
        // Ignore scalars.
        if (base_type <= reflection::Double) continue;
        // Ignore fields that are not stored.
        auto offset = table->GetOptionalFieldOffset(fielddef.offset());
        if (!offset) continue;
        // Ignore structs (stored inline, no offsets to adjust).
        auto subobjectdef = base_type == reflection::Obj ?
            schema_.objects()->Get(fielddef.type()->index()) : nullptr;
        if (subobjectdef && subobjectdef->is_struct()) continue;
        // Get this fields' offset, and read it if safe.
        auto offsetloc = tableloc + offset;
        if (DagCheck(offsetloc))
          continue;  // This offset already visited.
        auto ref = offsetloc + ReadScalar<uoffset_t>(offsetloc);
        Straddle<uoffset_t, 1>(offsetloc, ref, offsetloc);
        // Recurse.
        switch (base_type) {
          case reflection::Obj: {
            ResizeTable(*subobjectdef, reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::Vector: {
            auto elem_type = fielddef.type()->element();
            // Only vectors of offsets (tables/strings) need adjusting;
            // scalar and struct elements are stored inline.
            if (elem_type != reflection::Obj && elem_type != reflection::String)
              break;
            auto vec = reinterpret_cast<Vector<uoffset_t> *>(ref);
            auto elemobjectdef = elem_type == reflection::Obj
                ? schema_.objects()->Get(fielddef.type()->index())
                : nullptr;
            if (elemobjectdef && elemobjectdef->is_struct()) break;
            for (uoffset_t i = 0; i < vec->size(); i++) {
              auto loc = vec->Data() + i * sizeof(uoffset_t);
              if (DagCheck(loc))
                continue;  // This offset already visited.
              auto dest = loc + vec->Get(i);
              Straddle<uoffset_t, 1>(loc, dest, loc);
              if (elemobjectdef)
                ResizeTable(*elemobjectdef, reinterpret_cast<Table *>(dest));
            }
            break;
          }
          case reflection::Union: {
            ResizeTable(GetUnionType(schema_, objectdef, fielddef, *table),
                        reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::String:
            break;
          default:
            assert(false);
        }
      }
      // Check if the vtable offset points beyond the insertion point.
      // Must do this last, since GetOptionalFieldOffset above still reads
      // this value.
      Straddle<soffset_t, -1>(table, vtable, table);
    }
  }

  // Declared but not defined: disallows assignment (pre-C++11 idiom).
  void operator=(const ResizeContext &rc);

 private:
  const reflection::Schema &schema_;   // Schema describing the buffer.
  uint8_t *startptr_;                  // Insertion point inside buf_.
  int delta_;                          // Bytes to insert (>0) or remove (<0).
  std::vector<uint8_t> &buf_;          // The buffer being resized.
  std::vector<uint8_t> dag_check_;     // Per-uoffset_t-slot "visited" flags.
};
293
// Overwrites the string `str` (which must live inside `flatbuf`'s storage)
// with `val`, growing or shrinking the whole buffer via ResizeContext when
// the lengths differ. Pass `root_table` if the buffer's root differs from
// the schema's root table.
void SetString(const reflection::Schema &schema, const std::string &val,
               const String *str, std::vector<uint8_t> *flatbuf,
               const reflection::Object *root_table) {
  auto delta = static_cast<int>(val.size()) - static_cast<int>(str->Length());
  auto str_start = static_cast<uoffset_t>(
      reinterpret_cast<const uint8_t *>(str) - vector_data(*flatbuf));
  // The character data starts just past the string's uoffset_t length field.
  auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
  if (delta) {
    // Clear the old string, since we don't want parts of it remaining.
    memset(vector_data(*flatbuf) + start, 0, str->Length());
    // Different size, we must expand (or contract).
    ResizeContext(schema, start, delta, flatbuf, root_table);
    // Set the new length.
    WriteScalar(vector_data(*flatbuf) + str_start,
                static_cast<uoffset_t>(val.size()));
  }
  // Copy new data. Safe because we created the right amount of space.
  // (+1 copies the terminating NUL as well.)
  memcpy(vector_data(*flatbuf) + start, val.c_str(), val.size() + 1);
}
313
// Resizes the vector `vec` (living inside `flatbuf`) from `num_elems` to
// `newsize` elements of `elem_size` bytes each, resizing the whole buffer
// via ResizeContext as needed. Returns a pointer to the first byte past the
// existing elements (where new elements, zero-initialized, begin).
uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
                         const VectorOfAny *vec, uoffset_t num_elems,
                         uoffset_t elem_size, std::vector<uint8_t> *flatbuf,
                         const reflection::Object *root_table) {
  auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
  auto delta_bytes = delta_elem * static_cast<int>(elem_size);
  auto vec_start = reinterpret_cast<const uint8_t *>(vec) -
                   vector_data(*flatbuf);
  // Insertion/removal point: just past the length field and existing elements.
  auto start = static_cast<uoffset_t>(vec_start + sizeof(uoffset_t) +
                                      elem_size * num_elems);
  if (delta_bytes) {
    if (delta_elem < 0) {
      // Clear elements we're throwing away, since some might remain in the
      // buffer.
      auto size_clear = -delta_elem * elem_size;
      memset(vector_data(*flatbuf) + start - size_clear, 0, size_clear);
    }
    ResizeContext(schema, start, delta_bytes, flatbuf, root_table);
    WriteScalar(vector_data(*flatbuf) + vec_start, newsize);  // Length field.
    // Set new elements to 0.. this can be overwritten by the caller.
    if (delta_elem > 0) {
      memset(vector_data(*flatbuf) + start, 0, delta_elem * elem_size);
    }
  }
  return vector_data(*flatbuf) + start;
}
340
// Appends an entire FlatBuffer (minus its root uoffset_t) to `flatbuf`,
// returning a pointer to the root table of the newly inserted data.
const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
                             const uint8_t *newbuf, size_t newlen) {
  // Align to sizeof(uoffset_t) past sizeof(largest_scalar_t) since we're
  // going to chop off the root offset.
  // I.e. pad until size % 8 == 4, so that after skipping the leading 4-byte
  // root offset the copied payload lands on an 8-byte boundary.
  while ((flatbuf.size() & (sizeof(uoffset_t) - 1)) ||
         !(flatbuf.size() & (sizeof(largest_scalar_t) - 1))) {
    flatbuf.push_back(0);
  }
  auto insertion_point = static_cast<uoffset_t>(flatbuf.size());
  // Insert the entire FlatBuffer minus the root pointer.
  flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
  // The stored root offset was relative to the start of newbuf; account for
  // the sizeof(uoffset_t) bytes we did not copy.
  auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
  return vector_data(flatbuf) + insertion_point + root_offset;
}
355
// Copies `size` bytes of inline data (a scalar or struct field) from `table`
// into the builder, aligned to `align`, and records the field so EndTable
// can emit the corresponding vtable entry.
void CopyInline(FlatBufferBuilder &fbb, const reflection::Field &fielddef,
                const Table &table, size_t align, size_t size) {
  fbb.Align(align);
  fbb.PushBytes(table.GetStruct<const uint8_t *>(fielddef.offset()), size);
  fbb.TrackField(fielddef.offset(), fbb.GetSize());
}
362
// Deep-copies `table` (described by `objectdef` within `schema`) into `fbb`,
// returning the offset of the copy. Works in two passes: first all
// sub-objects (strings, sub-tables, unions, vectors) are copied and their
// offsets collected; then the table itself is built, consuming those
// offsets in field order. If `use_string_pooling` is set, identical strings
// are shared via CreateSharedString.
Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
                                const reflection::Schema &schema,
                                const reflection::Object &objectdef,
                                const Table &table,
                                bool use_string_pooling) {
  // Before we can construct the table, we have to first generate any
  // subobjects, and collect their offsets.
  std::vector<uoffset_t> offsets;
  auto fielddefs = objectdef.fields();
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    // Skip if field is not present in the source.
    if (!table.CheckField(fielddef.offset())) continue;
    uoffset_t offset = 0;
    switch (fielddef.type()->base_type()) {
      case reflection::String: {
        offset = use_string_pooling
                 ? fbb.CreateSharedString(GetFieldS(table, fielddef)).o
                 : fbb.CreateString(GetFieldS(table, fielddef)).o;
        break;
      }
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        // Structs are copied inline in the second pass, not here.
        if (!subobjectdef.is_struct()) {
          offset = CopyTable(fbb, schema, subobjectdef,
                             *GetFieldT(table, fielddef)).o;
        }
        break;
      }
      case reflection::Union: {
        // Resolve the concrete table type from the union's type field.
        auto &subobjectdef = GetUnionType(schema, objectdef, fielddef, table);
        offset = CopyTable(fbb, schema, subobjectdef,
                           *GetFieldT(table, fielddef)).o;
        break;
      }
      case reflection::Vector: {
        auto vec = table.GetPointer<const Vector<Offset<Table>> *>(
            fielddef.offset());
        auto element_base_type = fielddef.type()->element();
        auto elemobjectdef = element_base_type == reflection::Obj
                             ? schema.objects()->Get(fielddef.type()->index())
                             : nullptr;
        switch (element_base_type) {
          case reflection::String: {
            // Copy each string element individually.
            std::vector<Offset<const String *>> elements(vec->size());
            auto vec_s = reinterpret_cast<const Vector<Offset<String>> *>(vec);
            for (uoffset_t i = 0; i < vec_s->size(); i++) {
              elements[i] = use_string_pooling
                            ? fbb.CreateSharedString(vec_s->Get(i)).o
                            : fbb.CreateString(vec_s->Get(i)).o;
            }
            offset = fbb.CreateVector(elements).o;
            break;
          }
          case reflection::Obj: {
            if (!elemobjectdef->is_struct()) {
              // Vector of tables: deep-copy each element.
              std::vector<Offset<const Table *>> elements(vec->size());
              for (uoffset_t i = 0; i < vec->size(); i++) {
                elements[i] =
                    CopyTable(fbb, schema, *elemobjectdef, *vec->Get(i));
              }
              offset = fbb.CreateVector(elements).o;
              break;
            }
          }
          // FALL-THRU: vectors of structs are copied as raw bytes below.
          default: {  // Scalars and structs.
            auto element_size = GetTypeSize(element_base_type);
            if (elemobjectdef && elemobjectdef->is_struct())
              element_size = elemobjectdef->bytesize();
            fbb.StartVector(element_size, vec->size());
            fbb.PushBytes(vec->Data(), element_size * vec->size());
            offset = fbb.EndVector(vec->size());
            break;
          }
        }
        break;
      }
      default:  // Scalars.
        break;
    }
    if (offset) {
      offsets.push_back(offset);
    }
  }
  // Now we can build the actual table from either offsets or scalar data.
  auto start = objectdef.is_struct()
               ? fbb.StartStruct(objectdef.minalign())
               : fbb.StartTable();
  size_t offset_idx = 0;
  for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
    auto &fielddef = **it;
    if (!table.CheckField(fielddef.offset())) continue;
    auto base_type = fielddef.type()->base_type();
    switch (base_type) {
      case reflection::Obj: {
        auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
        if (subobjectdef.is_struct()) {
          // Structs are stored inline; copy their raw bytes.
          CopyInline(fbb, fielddef, table, subobjectdef.minalign(),
                     subobjectdef.bytesize());
          break;
        }
      }
      // ELSE FALL-THRU: non-struct objects use the offset collected above.
      case reflection::Union:
      case reflection::String:
      case reflection::Vector:
        // Consume offsets in the same field order they were collected.
        fbb.AddOffset(fielddef.offset(), Offset<void>(offsets[offset_idx++]));
        break;
      default: {  // Scalars.
        auto size = GetTypeSize(base_type);
        CopyInline(fbb, fielddef, table, size, size);
        break;
      }
    }
  }
  // Every collected offset must have been consumed exactly once.
  assert(offset_idx == offsets.size());
  if (objectdef.is_struct()) {
    fbb.ClearOffsets();
    return fbb.EndStruct();
  } else {
    return fbb.EndTable(start);
  }
}
487
VerifyStruct(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)488 bool VerifyStruct(flatbuffers::Verifier &v,
489 const flatbuffers::Table &parent_table,
490 voffset_t field_offset,
491 const reflection::Object &obj,
492 bool required) {
493 auto offset = parent_table.GetOptionalFieldOffset(field_offset);
494 if (required && !offset) {
495 return false;
496 }
497
498 return !offset || v.Verify(reinterpret_cast<const uint8_t*>(&parent_table)
499 + offset, obj.bytesize());
500 }
501
VerifyVectorOfStructs(flatbuffers::Verifier & v,const flatbuffers::Table & parent_table,voffset_t field_offset,const reflection::Object & obj,bool required)502 bool VerifyVectorOfStructs(flatbuffers::Verifier &v,
503 const flatbuffers::Table &parent_table,
504 voffset_t field_offset,
505 const reflection::Object &obj,
506 bool required) {
507 auto p = parent_table.GetPointer<const uint8_t*>(field_offset);
508 const uint8_t* end;
509 if (required && !p) {
510 return false;
511 }
512
513 return !p || v.VerifyVector(p, obj.bytesize(), &end);
514 }
515
// Forward declaration to resolve the cyclic dependency between VerifyObject
// and VerifyVector (each may recurse into the other).
bool VerifyObject(flatbuffers::Verifier &v,
                  const reflection::Schema &schema,
                  const reflection::Object &obj,
                  const flatbuffers::Table *table,
                  bool isRequired);
522
// Verifies the vector field `vec_field` of `table` against the verifier's
// buffer bounds, dispatching on the vector's element type. Vectors of
// tables/strings recurse into each element. Nested vectors and vectors of
// unions are not representable in FlatBuffers schemas, hence the asserts.
bool VerifyVector(flatbuffers::Verifier &v,
                  const reflection::Schema &schema,
                  const flatbuffers::Table &table,
                  const reflection::Field &vec_field) {
  assert(vec_field.type()->base_type() == reflection::Vector);
  // First check that the field's own uoffset_t is in bounds.
  if (!table.VerifyField<uoffset_t>(v, vec_field.offset()))
    return false;

  switch (vec_field.type()->element()) {
    case reflection::None:
      assert(false);
      break;
    case reflection::UType:
      return v.Verify(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
    case reflection::Bool:
    case reflection::Byte:
    case reflection::UByte:
      // Element width is what matters for bounds checking, not signedness.
      return v.Verify(flatbuffers::GetFieldV<int8_t>(table, vec_field));
    case reflection::Short:
    case reflection::UShort:
      return v.Verify(flatbuffers::GetFieldV<int16_t>(table, vec_field));
    case reflection::Int:
    case reflection::UInt:
      return v.Verify(flatbuffers::GetFieldV<int32_t>(table, vec_field));
    case reflection::Long:
    case reflection::ULong:
      return v.Verify(flatbuffers::GetFieldV<int64_t>(table, vec_field));
    case reflection::Float:
      return v.Verify(flatbuffers::GetFieldV<float>(table, vec_field));
    case reflection::Double:
      return v.Verify(flatbuffers::GetFieldV<double>(table, vec_field));
    case reflection::String: {
      // Verify the offsets vector itself, then each string it points at.
      auto vecString =
          flatbuffers::GetFieldV<flatbuffers::
          Offset<flatbuffers::String>>(table, vec_field);
      if (v.Verify(vecString) && v.VerifyVectorOfStrings(vecString)) {
        return true;
      } else {
        return false;
      }
    }
    case reflection::Vector:
      // FlatBuffers has no nested vectors; this element type cannot occur.
      assert(false);
      break;
    case reflection::Obj: {
      auto obj = schema.objects()->Get(vec_field.type()->index());
      if (obj->is_struct()) {
        // Structs are stored inline: verify the raw element bytes.
        if (!VerifyVectorOfStructs(v, table, vec_field.offset(), *obj,
                                   vec_field.required())) {
          return false;
        }
      } else {
        // Tables: verify the offsets vector, then recurse per element.
        auto vec =
            flatbuffers::GetFieldV<flatbuffers::
            Offset<flatbuffers::Table>>(table, vec_field);
        if (!v.Verify(vec))
          return false;
        if (vec) {
          for (uoffset_t j = 0; j < vec->size(); j++) {
            // Elements of a vector are always present, hence required=true.
            if (!VerifyObject(v, schema, *obj, vec->Get(j), true)) {
              return false;
            }
          }
        }
      }
      return true;
    }
    case reflection::Union:
      // FlatBuffers has no vectors of unions; this element type cannot occur.
      assert(false);
      break;
    default:
      assert(false);
      break;
  }

  return false;
}
600
// Verifies `table` (described by `obj` within `schema`): its vtable, every
// field's inline data, and recursively all sub-objects. A null `table`
// passes unless `required` is set.
bool VerifyObject(flatbuffers::Verifier &v,
                  const reflection::Schema &schema,
                  const reflection::Object &obj,
                  const flatbuffers::Table *table,
                  bool required) {
  if (!table) {
    if (!required)
      return true;
    else
      return false;
  }

  // Checks the table's vtable offset and vtable bounds.
  if (!table->VerifyTableStart(v))
    return false;

  for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
    auto field_def = obj.fields()->Get(i);
    switch (field_def->type()->base_type()) {
      case reflection::None:
        assert(false);
        break;
      case reflection::UType:
        if (!table->VerifyField<uint8_t>(v, field_def->offset()))
          return false;
        break;
      case reflection::Bool:
      case reflection::Byte:
      case reflection::UByte:
        // Field width is what matters for bounds checking, not signedness.
        if (!table->VerifyField<int8_t>(v, field_def->offset()))
          return false;
        break;
      case reflection::Short:
      case reflection::UShort:
        if (!table->VerifyField<int16_t>(v, field_def->offset()))
          return false;
        break;
      case reflection::Int:
      case reflection::UInt:
        if (!table->VerifyField<int32_t>(v, field_def->offset()))
          return false;
        break;
      case reflection::Long:
      case reflection::ULong:
        if (!table->VerifyField<int64_t>(v, field_def->offset()))
          return false;
        break;
      case reflection::Float:
        if (!table->VerifyField<float>(v, field_def->offset()))
          return false;
        break;
      case reflection::Double:
        if (!table->VerifyField<double>(v, field_def->offset()))
          return false;
        break;
      case reflection::String:
        // Verify the field's offset, then the string it points at.
        if (!table->VerifyField<uoffset_t>(v, field_def->offset()) ||
            !v.Verify(flatbuffers::GetFieldS(*table, *field_def))) {
          return false;
        }
        break;
      case reflection::Vector:
        if (!VerifyVector(v, schema, *table, *field_def))
          return false;
        break;
      case reflection::Obj: {
        auto child_obj = schema.objects()->Get(field_def->type()->index());
        if (child_obj->is_struct()) {
          // Structs are stored inline; verify their raw bytes.
          if (!VerifyStruct(v, *table, field_def->offset(), *child_obj,
                            field_def->required())) {
            return false;
          }
        } else {
          // Sub-table: recurse.
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      case reflection::Union: {
        // get union type from the prev field
        // (a union's type field is laid out immediately before its value
        // field, hence offset - sizeof(voffset_t))
        voffset_t utype_offset = field_def->offset() - sizeof(voffset_t);
        auto utype = table->GetField<uint8_t>(utype_offset, 0);
        if (utype != 0) {
          // Means we have this union field present
          auto fb_enum = schema.enums()->Get(field_def->type()->index());
          auto child_obj = fb_enum->values()->Get(utype)->object();
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      default:
        assert(false);
        break;
    }
  }

  // Final per-table bookkeeping in the verifier (e.g. depth/table limits).
  if (!v.EndTable())
    return false;

  return true;
}
708
Verify(const reflection::Schema & schema,const reflection::Object & root,const uint8_t * buf,size_t length)709 bool Verify(const reflection::Schema &schema,
710 const reflection::Object &root,
711 const uint8_t *buf,
712 size_t length) {
713 Verifier v(buf, length);
714 return VerifyObject(v, schema, root, flatbuffers::GetAnyRoot(buf), true);
715 }
716
717 } // namespace flatbuffers
718