1 /* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6
7 http://www.apache.org/licenses/LICENSE-2.0
8
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 // automatically generated by the FlatBuffers compiler, do not modify
16
17
18 #ifndef FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_
19 #define FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_
20
21 #include "flatbuffers/flatbuffers.h"
22
23 namespace tflite {
24
25 struct ComputeSettings;
26 struct ComputeSettingsT;
27
28 struct NNAPISettings;
29 struct NNAPISettingsT;
30
31 struct GPUSettings;
32 struct GPUSettingsT;
33
34 struct HexagonSettings;
35 struct HexagonSettingsT;
36
37 struct XNNPackSettings;
38 struct XNNPackSettingsT;
39
40 struct EdgeTpuDeviceSpec;
41 struct EdgeTpuDeviceSpecT;
42
43 struct EdgeTpuInactivePowerConfig;
44 struct EdgeTpuInactivePowerConfigT;
45
46 struct EdgeTpuSettings;
47 struct EdgeTpuSettingsT;
48
49 struct CoralSettings;
50 struct CoralSettingsT;
51
52 struct CPUSettings;
53 struct CPUSettingsT;
54
55 struct TFLiteSettings;
56 struct TFLiteSettingsT;
57
58 struct FallbackSettings;
59 struct FallbackSettingsT;
60
61 struct BenchmarkMetric;
62 struct BenchmarkMetricT;
63
64 struct BenchmarkResult;
65 struct BenchmarkResultT;
66
67 struct ErrorCode;
68 struct ErrorCodeT;
69
70 struct BenchmarkError;
71 struct BenchmarkErrorT;
72
73 struct BenchmarkEvent;
74 struct BenchmarkEventT;
75
76 enum ExecutionPreference {
77 ExecutionPreference_ANY = 0,
78 ExecutionPreference_LOW_LATENCY = 1,
79 ExecutionPreference_LOW_POWER = 2,
80 ExecutionPreference_FORCE_CPU = 3,
81 ExecutionPreference_MIN = ExecutionPreference_ANY,
82 ExecutionPreference_MAX = ExecutionPreference_FORCE_CPU
83 };
84
EnumValuesExecutionPreference()85 inline const ExecutionPreference (&EnumValuesExecutionPreference())[4] {
86 static const ExecutionPreference values[] = {
87 ExecutionPreference_ANY,
88 ExecutionPreference_LOW_LATENCY,
89 ExecutionPreference_LOW_POWER,
90 ExecutionPreference_FORCE_CPU
91 };
92 return values;
93 }
94
EnumNamesExecutionPreference()95 inline const char * const *EnumNamesExecutionPreference() {
96 static const char * const names[5] = {
97 "ANY",
98 "LOW_LATENCY",
99 "LOW_POWER",
100 "FORCE_CPU",
101 nullptr
102 };
103 return names;
104 }
105
EnumNameExecutionPreference(ExecutionPreference e)106 inline const char *EnumNameExecutionPreference(ExecutionPreference e) {
107 if (flatbuffers::IsOutRange(e, ExecutionPreference_ANY, ExecutionPreference_FORCE_CPU)) return "";
108 const size_t index = static_cast<size_t>(e);
109 return EnumNamesExecutionPreference()[index];
110 }
111
112 enum Delegate {
113 Delegate_NONE = 0,
114 Delegate_NNAPI = 1,
115 Delegate_GPU = 2,
116 Delegate_HEXAGON = 3,
117 Delegate_XNNPACK = 4,
118 Delegate_EDGETPU = 5,
119 Delegate_EDGETPU_CORAL = 6,
120 Delegate_MIN = Delegate_NONE,
121 Delegate_MAX = Delegate_EDGETPU_CORAL
122 };
123
EnumValuesDelegate()124 inline const Delegate (&EnumValuesDelegate())[7] {
125 static const Delegate values[] = {
126 Delegate_NONE,
127 Delegate_NNAPI,
128 Delegate_GPU,
129 Delegate_HEXAGON,
130 Delegate_XNNPACK,
131 Delegate_EDGETPU,
132 Delegate_EDGETPU_CORAL
133 };
134 return values;
135 }
136
EnumNamesDelegate()137 inline const char * const *EnumNamesDelegate() {
138 static const char * const names[8] = {
139 "NONE",
140 "NNAPI",
141 "GPU",
142 "HEXAGON",
143 "XNNPACK",
144 "EDGETPU",
145 "EDGETPU_CORAL",
146 nullptr
147 };
148 return names;
149 }
150
EnumNameDelegate(Delegate e)151 inline const char *EnumNameDelegate(Delegate e) {
152 if (flatbuffers::IsOutRange(e, Delegate_NONE, Delegate_EDGETPU_CORAL)) return "";
153 const size_t index = static_cast<size_t>(e);
154 return EnumNamesDelegate()[index];
155 }
156
157 enum NNAPIExecutionPreference {
158 NNAPIExecutionPreference_UNDEFINED = 0,
159 NNAPIExecutionPreference_NNAPI_LOW_POWER = 1,
160 NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER = 2,
161 NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED = 3,
162 NNAPIExecutionPreference_MIN = NNAPIExecutionPreference_UNDEFINED,
163 NNAPIExecutionPreference_MAX = NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
164 };
165
EnumValuesNNAPIExecutionPreference()166 inline const NNAPIExecutionPreference (&EnumValuesNNAPIExecutionPreference())[4] {
167 static const NNAPIExecutionPreference values[] = {
168 NNAPIExecutionPreference_UNDEFINED,
169 NNAPIExecutionPreference_NNAPI_LOW_POWER,
170 NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER,
171 NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
172 };
173 return values;
174 }
175
EnumNamesNNAPIExecutionPreference()176 inline const char * const *EnumNamesNNAPIExecutionPreference() {
177 static const char * const names[5] = {
178 "UNDEFINED",
179 "NNAPI_LOW_POWER",
180 "NNAPI_FAST_SINGLE_ANSWER",
181 "NNAPI_SUSTAINED_SPEED",
182 nullptr
183 };
184 return names;
185 }
186
EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e)187 inline const char *EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e) {
188 if (flatbuffers::IsOutRange(e, NNAPIExecutionPreference_UNDEFINED, NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED)) return "";
189 const size_t index = static_cast<size_t>(e);
190 return EnumNamesNNAPIExecutionPreference()[index];
191 }
192
193 enum NNAPIExecutionPriority {
194 NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED = 0,
195 NNAPIExecutionPriority_NNAPI_PRIORITY_LOW = 1,
196 NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM = 2,
197 NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH = 3,
198 NNAPIExecutionPriority_MIN = NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
199 NNAPIExecutionPriority_MAX = NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
200 };
201
EnumValuesNNAPIExecutionPriority()202 inline const NNAPIExecutionPriority (&EnumValuesNNAPIExecutionPriority())[4] {
203 static const NNAPIExecutionPriority values[] = {
204 NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
205 NNAPIExecutionPriority_NNAPI_PRIORITY_LOW,
206 NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM,
207 NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
208 };
209 return values;
210 }
211
EnumNamesNNAPIExecutionPriority()212 inline const char * const *EnumNamesNNAPIExecutionPriority() {
213 static const char * const names[5] = {
214 "NNAPI_PRIORITY_UNDEFINED",
215 "NNAPI_PRIORITY_LOW",
216 "NNAPI_PRIORITY_MEDIUM",
217 "NNAPI_PRIORITY_HIGH",
218 nullptr
219 };
220 return names;
221 }
222
EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e)223 inline const char *EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e) {
224 if (flatbuffers::IsOutRange(e, NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED, NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH)) return "";
225 const size_t index = static_cast<size_t>(e);
226 return EnumNamesNNAPIExecutionPriority()[index];
227 }
228
229 enum GPUBackend {
230 GPUBackend_UNSET = 0,
231 GPUBackend_OPENCL = 1,
232 GPUBackend_OPENGL = 2,
233 GPUBackend_MIN = GPUBackend_UNSET,
234 GPUBackend_MAX = GPUBackend_OPENGL
235 };
236
EnumValuesGPUBackend()237 inline const GPUBackend (&EnumValuesGPUBackend())[3] {
238 static const GPUBackend values[] = {
239 GPUBackend_UNSET,
240 GPUBackend_OPENCL,
241 GPUBackend_OPENGL
242 };
243 return values;
244 }
245
EnumNamesGPUBackend()246 inline const char * const *EnumNamesGPUBackend() {
247 static const char * const names[4] = {
248 "UNSET",
249 "OPENCL",
250 "OPENGL",
251 nullptr
252 };
253 return names;
254 }
255
EnumNameGPUBackend(GPUBackend e)256 inline const char *EnumNameGPUBackend(GPUBackend e) {
257 if (flatbuffers::IsOutRange(e, GPUBackend_UNSET, GPUBackend_OPENGL)) return "";
258 const size_t index = static_cast<size_t>(e);
259 return EnumNamesGPUBackend()[index];
260 }
261
262 namespace EdgeTpuDeviceSpec_ {
263
264 enum PlatformType {
265 PlatformType_MMIO = 0,
266 PlatformType_REFERENCE = 1,
267 PlatformType_SIMULATOR = 2,
268 PlatformType_REMOTE_SIMULATOR = 3,
269 PlatformType_MIN = PlatformType_MMIO,
270 PlatformType_MAX = PlatformType_REMOTE_SIMULATOR
271 };
272
EnumValuesPlatformType()273 inline const PlatformType (&EnumValuesPlatformType())[4] {
274 static const PlatformType values[] = {
275 PlatformType_MMIO,
276 PlatformType_REFERENCE,
277 PlatformType_SIMULATOR,
278 PlatformType_REMOTE_SIMULATOR
279 };
280 return values;
281 }
282
EnumNamesPlatformType()283 inline const char * const *EnumNamesPlatformType() {
284 static const char * const names[5] = {
285 "MMIO",
286 "REFERENCE",
287 "SIMULATOR",
288 "REMOTE_SIMULATOR",
289 nullptr
290 };
291 return names;
292 }
293
EnumNamePlatformType(PlatformType e)294 inline const char *EnumNamePlatformType(PlatformType e) {
295 if (flatbuffers::IsOutRange(e, PlatformType_MMIO, PlatformType_REMOTE_SIMULATOR)) return "";
296 const size_t index = static_cast<size_t>(e);
297 return EnumNamesPlatformType()[index];
298 }
299
300 } // namespace EdgeTpuDeviceSpec_
301
302 enum EdgeTpuPowerState {
303 EdgeTpuPowerState_UNDEFINED_POWERSTATE = 0,
304 EdgeTpuPowerState_TPU_CORE_OFF = 1,
305 EdgeTpuPowerState_READY = 2,
306 EdgeTpuPowerState_ACTIVE_MIN_POWER = 3,
307 EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER = 4,
308 EdgeTpuPowerState_ACTIVE_LOW_POWER = 5,
309 EdgeTpuPowerState_ACTIVE = 6,
310 EdgeTpuPowerState_OVER_DRIVE = 7,
311 EdgeTpuPowerState_MIN = EdgeTpuPowerState_UNDEFINED_POWERSTATE,
312 EdgeTpuPowerState_MAX = EdgeTpuPowerState_OVER_DRIVE
313 };
314
EnumValuesEdgeTpuPowerState()315 inline const EdgeTpuPowerState (&EnumValuesEdgeTpuPowerState())[8] {
316 static const EdgeTpuPowerState values[] = {
317 EdgeTpuPowerState_UNDEFINED_POWERSTATE,
318 EdgeTpuPowerState_TPU_CORE_OFF,
319 EdgeTpuPowerState_READY,
320 EdgeTpuPowerState_ACTIVE_MIN_POWER,
321 EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER,
322 EdgeTpuPowerState_ACTIVE_LOW_POWER,
323 EdgeTpuPowerState_ACTIVE,
324 EdgeTpuPowerState_OVER_DRIVE
325 };
326 return values;
327 }
328
EnumNamesEdgeTpuPowerState()329 inline const char * const *EnumNamesEdgeTpuPowerState() {
330 static const char * const names[9] = {
331 "UNDEFINED_POWERSTATE",
332 "TPU_CORE_OFF",
333 "READY",
334 "ACTIVE_MIN_POWER",
335 "ACTIVE_VERY_LOW_POWER",
336 "ACTIVE_LOW_POWER",
337 "ACTIVE",
338 "OVER_DRIVE",
339 nullptr
340 };
341 return names;
342 }
343
EnumNameEdgeTpuPowerState(EdgeTpuPowerState e)344 inline const char *EnumNameEdgeTpuPowerState(EdgeTpuPowerState e) {
345 if (flatbuffers::IsOutRange(e, EdgeTpuPowerState_UNDEFINED_POWERSTATE, EdgeTpuPowerState_OVER_DRIVE)) return "";
346 const size_t index = static_cast<size_t>(e);
347 return EnumNamesEdgeTpuPowerState()[index];
348 }
349
350 namespace CoralSettings_ {
351
352 enum Performance {
353 Performance_UNDEFINED = 0,
354 Performance_MAXIMUM = 1,
355 Performance_HIGH = 2,
356 Performance_MEDIUM = 3,
357 Performance_LOW = 4,
358 Performance_MIN = Performance_UNDEFINED,
359 Performance_MAX = Performance_LOW
360 };
361
EnumValuesPerformance()362 inline const Performance (&EnumValuesPerformance())[5] {
363 static const Performance values[] = {
364 Performance_UNDEFINED,
365 Performance_MAXIMUM,
366 Performance_HIGH,
367 Performance_MEDIUM,
368 Performance_LOW
369 };
370 return values;
371 }
372
EnumNamesPerformance()373 inline const char * const *EnumNamesPerformance() {
374 static const char * const names[6] = {
375 "UNDEFINED",
376 "MAXIMUM",
377 "HIGH",
378 "MEDIUM",
379 "LOW",
380 nullptr
381 };
382 return names;
383 }
384
EnumNamePerformance(Performance e)385 inline const char *EnumNamePerformance(Performance e) {
386 if (flatbuffers::IsOutRange(e, Performance_UNDEFINED, Performance_LOW)) return "";
387 const size_t index = static_cast<size_t>(e);
388 return EnumNamesPerformance()[index];
389 }
390
391 } // namespace CoralSettings_
392
393 enum BenchmarkEventType {
394 BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE = 0,
395 BenchmarkEventType_START = 1,
396 BenchmarkEventType_END = 2,
397 BenchmarkEventType_ERROR = 3,
398 BenchmarkEventType_LOGGED = 4,
399 BenchmarkEventType_MIN = BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
400 BenchmarkEventType_MAX = BenchmarkEventType_LOGGED
401 };
402
EnumValuesBenchmarkEventType()403 inline const BenchmarkEventType (&EnumValuesBenchmarkEventType())[5] {
404 static const BenchmarkEventType values[] = {
405 BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
406 BenchmarkEventType_START,
407 BenchmarkEventType_END,
408 BenchmarkEventType_ERROR,
409 BenchmarkEventType_LOGGED
410 };
411 return values;
412 }
413
EnumNamesBenchmarkEventType()414 inline const char * const *EnumNamesBenchmarkEventType() {
415 static const char * const names[6] = {
416 "UNDEFINED_BENCHMARK_EVENT_TYPE",
417 "START",
418 "END",
419 "ERROR",
420 "LOGGED",
421 nullptr
422 };
423 return names;
424 }
425
EnumNameBenchmarkEventType(BenchmarkEventType e)426 inline const char *EnumNameBenchmarkEventType(BenchmarkEventType e) {
427 if (flatbuffers::IsOutRange(e, BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE, BenchmarkEventType_LOGGED)) return "";
428 const size_t index = static_cast<size_t>(e);
429 return EnumNamesBenchmarkEventType()[index];
430 }
431
432 enum BenchmarkStage {
433 BenchmarkStage_UNKNOWN = 0,
434 BenchmarkStage_INITIALIZATION = 1,
435 BenchmarkStage_INFERENCE = 2,
436 BenchmarkStage_MIN = BenchmarkStage_UNKNOWN,
437 BenchmarkStage_MAX = BenchmarkStage_INFERENCE
438 };
439
EnumValuesBenchmarkStage()440 inline const BenchmarkStage (&EnumValuesBenchmarkStage())[3] {
441 static const BenchmarkStage values[] = {
442 BenchmarkStage_UNKNOWN,
443 BenchmarkStage_INITIALIZATION,
444 BenchmarkStage_INFERENCE
445 };
446 return values;
447 }
448
EnumNamesBenchmarkStage()449 inline const char * const *EnumNamesBenchmarkStage() {
450 static const char * const names[4] = {
451 "UNKNOWN",
452 "INITIALIZATION",
453 "INFERENCE",
454 nullptr
455 };
456 return names;
457 }
458
EnumNameBenchmarkStage(BenchmarkStage e)459 inline const char *EnumNameBenchmarkStage(BenchmarkStage e) {
460 if (flatbuffers::IsOutRange(e, BenchmarkStage_UNKNOWN, BenchmarkStage_INFERENCE)) return "";
461 const size_t index = static_cast<size_t>(e);
462 return EnumNamesBenchmarkStage()[index];
463 }
464
465 struct ComputeSettingsT : public flatbuffers::NativeTable {
466 typedef ComputeSettings TableType;
467 tflite::ExecutionPreference preference;
468 std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings;
469 std::string model_namespace_for_statistics;
470 std::string model_identifier_for_statistics;
ComputeSettingsTComputeSettingsT471 ComputeSettingsT()
472 : preference(tflite::ExecutionPreference_ANY) {
473 }
474 };
475
476 struct ComputeSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
477 typedef ComputeSettingsT NativeTableType;
478 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
479 VT_PREFERENCE = 4,
480 VT_TFLITE_SETTINGS = 6,
481 VT_MODEL_NAMESPACE_FOR_STATISTICS = 8,
482 VT_MODEL_IDENTIFIER_FOR_STATISTICS = 10
483 };
preferenceFLATBUFFERS_FINAL_CLASS484 tflite::ExecutionPreference preference() const {
485 return static_cast<tflite::ExecutionPreference>(GetField<int32_t>(VT_PREFERENCE, 0));
486 }
tflite_settingsFLATBUFFERS_FINAL_CLASS487 const tflite::TFLiteSettings *tflite_settings() const {
488 return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
489 }
model_namespace_for_statisticsFLATBUFFERS_FINAL_CLASS490 const flatbuffers::String *model_namespace_for_statistics() const {
491 return GetPointer<const flatbuffers::String *>(VT_MODEL_NAMESPACE_FOR_STATISTICS);
492 }
model_identifier_for_statisticsFLATBUFFERS_FINAL_CLASS493 const flatbuffers::String *model_identifier_for_statistics() const {
494 return GetPointer<const flatbuffers::String *>(VT_MODEL_IDENTIFIER_FOR_STATISTICS);
495 }
VerifyFLATBUFFERS_FINAL_CLASS496 bool Verify(flatbuffers::Verifier &verifier) const {
497 return VerifyTableStart(verifier) &&
498 VerifyField<int32_t>(verifier, VT_PREFERENCE) &&
499 VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
500 verifier.VerifyTable(tflite_settings()) &&
501 VerifyOffset(verifier, VT_MODEL_NAMESPACE_FOR_STATISTICS) &&
502 verifier.VerifyString(model_namespace_for_statistics()) &&
503 VerifyOffset(verifier, VT_MODEL_IDENTIFIER_FOR_STATISTICS) &&
504 verifier.VerifyString(model_identifier_for_statistics()) &&
505 verifier.EndTable();
506 }
507 ComputeSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
508 void UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
509 static flatbuffers::Offset<ComputeSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
510 };
511
512 struct ComputeSettingsBuilder {
513 flatbuffers::FlatBufferBuilder &fbb_;
514 flatbuffers::uoffset_t start_;
add_preferenceComputeSettingsBuilder515 void add_preference(tflite::ExecutionPreference preference) {
516 fbb_.AddElement<int32_t>(ComputeSettings::VT_PREFERENCE, static_cast<int32_t>(preference), 0);
517 }
add_tflite_settingsComputeSettingsBuilder518 void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
519 fbb_.AddOffset(ComputeSettings::VT_TFLITE_SETTINGS, tflite_settings);
520 }
add_model_namespace_for_statisticsComputeSettingsBuilder521 void add_model_namespace_for_statistics(flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics) {
522 fbb_.AddOffset(ComputeSettings::VT_MODEL_NAMESPACE_FOR_STATISTICS, model_namespace_for_statistics);
523 }
add_model_identifier_for_statisticsComputeSettingsBuilder524 void add_model_identifier_for_statistics(flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics) {
525 fbb_.AddOffset(ComputeSettings::VT_MODEL_IDENTIFIER_FOR_STATISTICS, model_identifier_for_statistics);
526 }
ComputeSettingsBuilderComputeSettingsBuilder527 explicit ComputeSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
528 : fbb_(_fbb) {
529 start_ = fbb_.StartTable();
530 }
531 ComputeSettingsBuilder &operator=(const ComputeSettingsBuilder &);
FinishComputeSettingsBuilder532 flatbuffers::Offset<ComputeSettings> Finish() {
533 const auto end = fbb_.EndTable(start_);
534 auto o = flatbuffers::Offset<ComputeSettings>(end);
535 return o;
536 }
537 };
538
539 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(
540 flatbuffers::FlatBufferBuilder &_fbb,
541 tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
542 flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
543 flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics = 0,
544 flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics = 0) {
545 ComputeSettingsBuilder builder_(_fbb);
546 builder_.add_model_identifier_for_statistics(model_identifier_for_statistics);
547 builder_.add_model_namespace_for_statistics(model_namespace_for_statistics);
548 builder_.add_tflite_settings(tflite_settings);
549 builder_.add_preference(preference);
550 return builder_.Finish();
551 }
552
553 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettingsDirect(
554 flatbuffers::FlatBufferBuilder &_fbb,
555 tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
556 flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
557 const char *model_namespace_for_statistics = nullptr,
558 const char *model_identifier_for_statistics = nullptr) {
559 auto model_namespace_for_statistics__ = model_namespace_for_statistics ? _fbb.CreateString(model_namespace_for_statistics) : 0;
560 auto model_identifier_for_statistics__ = model_identifier_for_statistics ? _fbb.CreateString(model_identifier_for_statistics) : 0;
561 return tflite::CreateComputeSettings(
562 _fbb,
563 preference,
564 tflite_settings,
565 model_namespace_for_statistics__,
566 model_identifier_for_statistics__);
567 }
568
569 flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
570
571 struct NNAPISettingsT : public flatbuffers::NativeTable {
572 typedef NNAPISettings TableType;
573 std::string accelerator_name;
574 std::string cache_directory;
575 std::string model_token;
576 tflite::NNAPIExecutionPreference execution_preference;
577 int32_t no_of_nnapi_instances_to_cache;
578 std::unique_ptr<tflite::FallbackSettingsT> fallback_settings;
579 bool allow_nnapi_cpu_on_android_10_plus;
580 tflite::NNAPIExecutionPriority execution_priority;
581 bool allow_dynamic_dimensions;
582 bool allow_fp16_precision_for_fp32;
NNAPISettingsTNNAPISettingsT583 NNAPISettingsT()
584 : execution_preference(tflite::NNAPIExecutionPreference_UNDEFINED),
585 no_of_nnapi_instances_to_cache(0),
586 allow_nnapi_cpu_on_android_10_plus(false),
587 execution_priority(tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED),
588 allow_dynamic_dimensions(false),
589 allow_fp16_precision_for_fp32(false) {
590 }
591 };
592
593 struct NNAPISettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
594 typedef NNAPISettingsT NativeTableType;
595 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
596 VT_ACCELERATOR_NAME = 4,
597 VT_CACHE_DIRECTORY = 6,
598 VT_MODEL_TOKEN = 8,
599 VT_EXECUTION_PREFERENCE = 10,
600 VT_NO_OF_NNAPI_INSTANCES_TO_CACHE = 12,
601 VT_FALLBACK_SETTINGS = 14,
602 VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS = 16,
603 VT_EXECUTION_PRIORITY = 18,
604 VT_ALLOW_DYNAMIC_DIMENSIONS = 20,
605 VT_ALLOW_FP16_PRECISION_FOR_FP32 = 22
606 };
accelerator_nameFLATBUFFERS_FINAL_CLASS607 const flatbuffers::String *accelerator_name() const {
608 return GetPointer<const flatbuffers::String *>(VT_ACCELERATOR_NAME);
609 }
cache_directoryFLATBUFFERS_FINAL_CLASS610 const flatbuffers::String *cache_directory() const {
611 return GetPointer<const flatbuffers::String *>(VT_CACHE_DIRECTORY);
612 }
model_tokenFLATBUFFERS_FINAL_CLASS613 const flatbuffers::String *model_token() const {
614 return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
615 }
execution_preferenceFLATBUFFERS_FINAL_CLASS616 tflite::NNAPIExecutionPreference execution_preference() const {
617 return static_cast<tflite::NNAPIExecutionPreference>(GetField<int32_t>(VT_EXECUTION_PREFERENCE, 0));
618 }
no_of_nnapi_instances_to_cacheFLATBUFFERS_FINAL_CLASS619 int32_t no_of_nnapi_instances_to_cache() const {
620 return GetField<int32_t>(VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, 0);
621 }
fallback_settingsFLATBUFFERS_FINAL_CLASS622 const tflite::FallbackSettings *fallback_settings() const {
623 return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
624 }
allow_nnapi_cpu_on_android_10_plusFLATBUFFERS_FINAL_CLASS625 bool allow_nnapi_cpu_on_android_10_plus() const {
626 return GetField<uint8_t>(VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, 0) != 0;
627 }
execution_priorityFLATBUFFERS_FINAL_CLASS628 tflite::NNAPIExecutionPriority execution_priority() const {
629 return static_cast<tflite::NNAPIExecutionPriority>(GetField<int32_t>(VT_EXECUTION_PRIORITY, 0));
630 }
allow_dynamic_dimensionsFLATBUFFERS_FINAL_CLASS631 bool allow_dynamic_dimensions() const {
632 return GetField<uint8_t>(VT_ALLOW_DYNAMIC_DIMENSIONS, 0) != 0;
633 }
allow_fp16_precision_for_fp32FLATBUFFERS_FINAL_CLASS634 bool allow_fp16_precision_for_fp32() const {
635 return GetField<uint8_t>(VT_ALLOW_FP16_PRECISION_FOR_FP32, 0) != 0;
636 }
VerifyFLATBUFFERS_FINAL_CLASS637 bool Verify(flatbuffers::Verifier &verifier) const {
638 return VerifyTableStart(verifier) &&
639 VerifyOffset(verifier, VT_ACCELERATOR_NAME) &&
640 verifier.VerifyString(accelerator_name()) &&
641 VerifyOffset(verifier, VT_CACHE_DIRECTORY) &&
642 verifier.VerifyString(cache_directory()) &&
643 VerifyOffset(verifier, VT_MODEL_TOKEN) &&
644 verifier.VerifyString(model_token()) &&
645 VerifyField<int32_t>(verifier, VT_EXECUTION_PREFERENCE) &&
646 VerifyField<int32_t>(verifier, VT_NO_OF_NNAPI_INSTANCES_TO_CACHE) &&
647 VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
648 verifier.VerifyTable(fallback_settings()) &&
649 VerifyField<uint8_t>(verifier, VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS) &&
650 VerifyField<int32_t>(verifier, VT_EXECUTION_PRIORITY) &&
651 VerifyField<uint8_t>(verifier, VT_ALLOW_DYNAMIC_DIMENSIONS) &&
652 VerifyField<uint8_t>(verifier, VT_ALLOW_FP16_PRECISION_FOR_FP32) &&
653 verifier.EndTable();
654 }
655 NNAPISettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
656 void UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
657 static flatbuffers::Offset<NNAPISettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
658 };
659
660 struct NNAPISettingsBuilder {
661 flatbuffers::FlatBufferBuilder &fbb_;
662 flatbuffers::uoffset_t start_;
add_accelerator_nameNNAPISettingsBuilder663 void add_accelerator_name(flatbuffers::Offset<flatbuffers::String> accelerator_name) {
664 fbb_.AddOffset(NNAPISettings::VT_ACCELERATOR_NAME, accelerator_name);
665 }
add_cache_directoryNNAPISettingsBuilder666 void add_cache_directory(flatbuffers::Offset<flatbuffers::String> cache_directory) {
667 fbb_.AddOffset(NNAPISettings::VT_CACHE_DIRECTORY, cache_directory);
668 }
add_model_tokenNNAPISettingsBuilder669 void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
670 fbb_.AddOffset(NNAPISettings::VT_MODEL_TOKEN, model_token);
671 }
add_execution_preferenceNNAPISettingsBuilder672 void add_execution_preference(tflite::NNAPIExecutionPreference execution_preference) {
673 fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PREFERENCE, static_cast<int32_t>(execution_preference), 0);
674 }
add_no_of_nnapi_instances_to_cacheNNAPISettingsBuilder675 void add_no_of_nnapi_instances_to_cache(int32_t no_of_nnapi_instances_to_cache) {
676 fbb_.AddElement<int32_t>(NNAPISettings::VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, no_of_nnapi_instances_to_cache, 0);
677 }
add_fallback_settingsNNAPISettingsBuilder678 void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
679 fbb_.AddOffset(NNAPISettings::VT_FALLBACK_SETTINGS, fallback_settings);
680 }
add_allow_nnapi_cpu_on_android_10_plusNNAPISettingsBuilder681 void add_allow_nnapi_cpu_on_android_10_plus(bool allow_nnapi_cpu_on_android_10_plus) {
682 fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, static_cast<uint8_t>(allow_nnapi_cpu_on_android_10_plus), 0);
683 }
add_execution_priorityNNAPISettingsBuilder684 void add_execution_priority(tflite::NNAPIExecutionPriority execution_priority) {
685 fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PRIORITY, static_cast<int32_t>(execution_priority), 0);
686 }
add_allow_dynamic_dimensionsNNAPISettingsBuilder687 void add_allow_dynamic_dimensions(bool allow_dynamic_dimensions) {
688 fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_DYNAMIC_DIMENSIONS, static_cast<uint8_t>(allow_dynamic_dimensions), 0);
689 }
add_allow_fp16_precision_for_fp32NNAPISettingsBuilder690 void add_allow_fp16_precision_for_fp32(bool allow_fp16_precision_for_fp32) {
691 fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_FP16_PRECISION_FOR_FP32, static_cast<uint8_t>(allow_fp16_precision_for_fp32), 0);
692 }
NNAPISettingsBuilderNNAPISettingsBuilder693 explicit NNAPISettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
694 : fbb_(_fbb) {
695 start_ = fbb_.StartTable();
696 }
697 NNAPISettingsBuilder &operator=(const NNAPISettingsBuilder &);
FinishNNAPISettingsBuilder698 flatbuffers::Offset<NNAPISettings> Finish() {
699 const auto end = fbb_.EndTable(start_);
700 auto o = flatbuffers::Offset<NNAPISettings>(end);
701 return o;
702 }
703 };
704
705 inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(
706 flatbuffers::FlatBufferBuilder &_fbb,
707 flatbuffers::Offset<flatbuffers::String> accelerator_name = 0,
708 flatbuffers::Offset<flatbuffers::String> cache_directory = 0,
709 flatbuffers::Offset<flatbuffers::String> model_token = 0,
710 tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
711 int32_t no_of_nnapi_instances_to_cache = 0,
712 flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
713 bool allow_nnapi_cpu_on_android_10_plus = false,
714 tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
715 bool allow_dynamic_dimensions = false,
716 bool allow_fp16_precision_for_fp32 = false) {
717 NNAPISettingsBuilder builder_(_fbb);
718 builder_.add_execution_priority(execution_priority);
719 builder_.add_fallback_settings(fallback_settings);
720 builder_.add_no_of_nnapi_instances_to_cache(no_of_nnapi_instances_to_cache);
721 builder_.add_execution_preference(execution_preference);
722 builder_.add_model_token(model_token);
723 builder_.add_cache_directory(cache_directory);
724 builder_.add_accelerator_name(accelerator_name);
725 builder_.add_allow_fp16_precision_for_fp32(allow_fp16_precision_for_fp32);
726 builder_.add_allow_dynamic_dimensions(allow_dynamic_dimensions);
727 builder_.add_allow_nnapi_cpu_on_android_10_plus(allow_nnapi_cpu_on_android_10_plus);
728 return builder_.Finish();
729 }
730
731 inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettingsDirect(
732 flatbuffers::FlatBufferBuilder &_fbb,
733 const char *accelerator_name = nullptr,
734 const char *cache_directory = nullptr,
735 const char *model_token = nullptr,
736 tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
737 int32_t no_of_nnapi_instances_to_cache = 0,
738 flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
739 bool allow_nnapi_cpu_on_android_10_plus = false,
740 tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
741 bool allow_dynamic_dimensions = false,
742 bool allow_fp16_precision_for_fp32 = false) {
743 auto accelerator_name__ = accelerator_name ? _fbb.CreateString(accelerator_name) : 0;
744 auto cache_directory__ = cache_directory ? _fbb.CreateString(cache_directory) : 0;
745 auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
746 return tflite::CreateNNAPISettings(
747 _fbb,
748 accelerator_name__,
749 cache_directory__,
750 model_token__,
751 execution_preference,
752 no_of_nnapi_instances_to_cache,
753 fallback_settings,
754 allow_nnapi_cpu_on_android_10_plus,
755 execution_priority,
756 allow_dynamic_dimensions,
757 allow_fp16_precision_for_fp32);
758 }
759
760 flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
761
762 struct GPUSettingsT : public flatbuffers::NativeTable {
763 typedef GPUSettings TableType;
764 bool is_precision_loss_allowed;
765 bool enable_quantized_inference;
766 tflite::GPUBackend force_backend;
GPUSettingsTGPUSettingsT767 GPUSettingsT()
768 : is_precision_loss_allowed(false),
769 enable_quantized_inference(true),
770 force_backend(tflite::GPUBackend_UNSET) {
771 }
772 };
773
774 struct GPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
775 typedef GPUSettingsT NativeTableType;
776 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
777 VT_IS_PRECISION_LOSS_ALLOWED = 4,
778 VT_ENABLE_QUANTIZED_INFERENCE = 6,
779 VT_FORCE_BACKEND = 8
780 };
is_precision_loss_allowedFLATBUFFERS_FINAL_CLASS781 bool is_precision_loss_allowed() const {
782 return GetField<uint8_t>(VT_IS_PRECISION_LOSS_ALLOWED, 0) != 0;
783 }
enable_quantized_inferenceFLATBUFFERS_FINAL_CLASS784 bool enable_quantized_inference() const {
785 return GetField<uint8_t>(VT_ENABLE_QUANTIZED_INFERENCE, 1) != 0;
786 }
force_backendFLATBUFFERS_FINAL_CLASS787 tflite::GPUBackend force_backend() const {
788 return static_cast<tflite::GPUBackend>(GetField<int32_t>(VT_FORCE_BACKEND, 0));
789 }
VerifyFLATBUFFERS_FINAL_CLASS790 bool Verify(flatbuffers::Verifier &verifier) const {
791 return VerifyTableStart(verifier) &&
792 VerifyField<uint8_t>(verifier, VT_IS_PRECISION_LOSS_ALLOWED) &&
793 VerifyField<uint8_t>(verifier, VT_ENABLE_QUANTIZED_INFERENCE) &&
794 VerifyField<int32_t>(verifier, VT_FORCE_BACKEND) &&
795 verifier.EndTable();
796 }
797 GPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
798 void UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
799 static flatbuffers::Offset<GPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
800 };
801
802 struct GPUSettingsBuilder {
803 flatbuffers::FlatBufferBuilder &fbb_;
804 flatbuffers::uoffset_t start_;
add_is_precision_loss_allowedGPUSettingsBuilder805 void add_is_precision_loss_allowed(bool is_precision_loss_allowed) {
806 fbb_.AddElement<uint8_t>(GPUSettings::VT_IS_PRECISION_LOSS_ALLOWED, static_cast<uint8_t>(is_precision_loss_allowed), 0);
807 }
add_enable_quantized_inferenceGPUSettingsBuilder808 void add_enable_quantized_inference(bool enable_quantized_inference) {
809 fbb_.AddElement<uint8_t>(GPUSettings::VT_ENABLE_QUANTIZED_INFERENCE, static_cast<uint8_t>(enable_quantized_inference), 1);
810 }
add_force_backendGPUSettingsBuilder811 void add_force_backend(tflite::GPUBackend force_backend) {
812 fbb_.AddElement<int32_t>(GPUSettings::VT_FORCE_BACKEND, static_cast<int32_t>(force_backend), 0);
813 }
GPUSettingsBuilderGPUSettingsBuilder814 explicit GPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
815 : fbb_(_fbb) {
816 start_ = fbb_.StartTable();
817 }
818 GPUSettingsBuilder &operator=(const GPUSettingsBuilder &);
FinishGPUSettingsBuilder819 flatbuffers::Offset<GPUSettings> Finish() {
820 const auto end = fbb_.EndTable(start_);
821 auto o = flatbuffers::Offset<GPUSettings>(end);
822 return o;
823 }
824 };
825
826 inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(
827 flatbuffers::FlatBufferBuilder &_fbb,
828 bool is_precision_loss_allowed = false,
829 bool enable_quantized_inference = true,
830 tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET) {
831 GPUSettingsBuilder builder_(_fbb);
832 builder_.add_force_backend(force_backend);
833 builder_.add_enable_quantized_inference(enable_quantized_inference);
834 builder_.add_is_precision_loss_allowed(is_precision_loss_allowed);
835 return builder_.Finish();
836 }
837
838 flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
839
840 struct HexagonSettingsT : public flatbuffers::NativeTable {
841 typedef HexagonSettings TableType;
842 int32_t debug_level;
843 int32_t powersave_level;
844 bool print_graph_profile;
845 bool print_graph_debug;
HexagonSettingsTHexagonSettingsT846 HexagonSettingsT()
847 : debug_level(0),
848 powersave_level(0),
849 print_graph_profile(false),
850 print_graph_debug(false) {
851 }
852 };
853
854 struct HexagonSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
855 typedef HexagonSettingsT NativeTableType;
856 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
857 VT_DEBUG_LEVEL = 4,
858 VT_POWERSAVE_LEVEL = 6,
859 VT_PRINT_GRAPH_PROFILE = 8,
860 VT_PRINT_GRAPH_DEBUG = 10
861 };
debug_levelFLATBUFFERS_FINAL_CLASS862 int32_t debug_level() const {
863 return GetField<int32_t>(VT_DEBUG_LEVEL, 0);
864 }
powersave_levelFLATBUFFERS_FINAL_CLASS865 int32_t powersave_level() const {
866 return GetField<int32_t>(VT_POWERSAVE_LEVEL, 0);
867 }
print_graph_profileFLATBUFFERS_FINAL_CLASS868 bool print_graph_profile() const {
869 return GetField<uint8_t>(VT_PRINT_GRAPH_PROFILE, 0) != 0;
870 }
print_graph_debugFLATBUFFERS_FINAL_CLASS871 bool print_graph_debug() const {
872 return GetField<uint8_t>(VT_PRINT_GRAPH_DEBUG, 0) != 0;
873 }
VerifyFLATBUFFERS_FINAL_CLASS874 bool Verify(flatbuffers::Verifier &verifier) const {
875 return VerifyTableStart(verifier) &&
876 VerifyField<int32_t>(verifier, VT_DEBUG_LEVEL) &&
877 VerifyField<int32_t>(verifier, VT_POWERSAVE_LEVEL) &&
878 VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_PROFILE) &&
879 VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_DEBUG) &&
880 verifier.EndTable();
881 }
882 HexagonSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
883 void UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
884 static flatbuffers::Offset<HexagonSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
885 };
886
887 struct HexagonSettingsBuilder {
888 flatbuffers::FlatBufferBuilder &fbb_;
889 flatbuffers::uoffset_t start_;
add_debug_levelHexagonSettingsBuilder890 void add_debug_level(int32_t debug_level) {
891 fbb_.AddElement<int32_t>(HexagonSettings::VT_DEBUG_LEVEL, debug_level, 0);
892 }
add_powersave_levelHexagonSettingsBuilder893 void add_powersave_level(int32_t powersave_level) {
894 fbb_.AddElement<int32_t>(HexagonSettings::VT_POWERSAVE_LEVEL, powersave_level, 0);
895 }
add_print_graph_profileHexagonSettingsBuilder896 void add_print_graph_profile(bool print_graph_profile) {
897 fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_PROFILE, static_cast<uint8_t>(print_graph_profile), 0);
898 }
add_print_graph_debugHexagonSettingsBuilder899 void add_print_graph_debug(bool print_graph_debug) {
900 fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_DEBUG, static_cast<uint8_t>(print_graph_debug), 0);
901 }
HexagonSettingsBuilderHexagonSettingsBuilder902 explicit HexagonSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
903 : fbb_(_fbb) {
904 start_ = fbb_.StartTable();
905 }
906 HexagonSettingsBuilder &operator=(const HexagonSettingsBuilder &);
FinishHexagonSettingsBuilder907 flatbuffers::Offset<HexagonSettings> Finish() {
908 const auto end = fbb_.EndTable(start_);
909 auto o = flatbuffers::Offset<HexagonSettings>(end);
910 return o;
911 }
912 };
913
914 inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(
915 flatbuffers::FlatBufferBuilder &_fbb,
916 int32_t debug_level = 0,
917 int32_t powersave_level = 0,
918 bool print_graph_profile = false,
919 bool print_graph_debug = false) {
920 HexagonSettingsBuilder builder_(_fbb);
921 builder_.add_powersave_level(powersave_level);
922 builder_.add_debug_level(debug_level);
923 builder_.add_print_graph_debug(print_graph_debug);
924 builder_.add_print_graph_profile(print_graph_profile);
925 return builder_.Finish();
926 }
927
928 flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
929
930 struct XNNPackSettingsT : public flatbuffers::NativeTable {
931 typedef XNNPackSettings TableType;
932 int32_t num_threads;
XNNPackSettingsTXNNPackSettingsT933 XNNPackSettingsT()
934 : num_threads(0) {
935 }
936 };
937
938 struct XNNPackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
939 typedef XNNPackSettingsT NativeTableType;
940 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
941 VT_NUM_THREADS = 4
942 };
num_threadsFLATBUFFERS_FINAL_CLASS943 int32_t num_threads() const {
944 return GetField<int32_t>(VT_NUM_THREADS, 0);
945 }
VerifyFLATBUFFERS_FINAL_CLASS946 bool Verify(flatbuffers::Verifier &verifier) const {
947 return VerifyTableStart(verifier) &&
948 VerifyField<int32_t>(verifier, VT_NUM_THREADS) &&
949 verifier.EndTable();
950 }
951 XNNPackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
952 void UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
953 static flatbuffers::Offset<XNNPackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
954 };
955
956 struct XNNPackSettingsBuilder {
957 flatbuffers::FlatBufferBuilder &fbb_;
958 flatbuffers::uoffset_t start_;
add_num_threadsXNNPackSettingsBuilder959 void add_num_threads(int32_t num_threads) {
960 fbb_.AddElement<int32_t>(XNNPackSettings::VT_NUM_THREADS, num_threads, 0);
961 }
XNNPackSettingsBuilderXNNPackSettingsBuilder962 explicit XNNPackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
963 : fbb_(_fbb) {
964 start_ = fbb_.StartTable();
965 }
966 XNNPackSettingsBuilder &operator=(const XNNPackSettingsBuilder &);
FinishXNNPackSettingsBuilder967 flatbuffers::Offset<XNNPackSettings> Finish() {
968 const auto end = fbb_.EndTable(start_);
969 auto o = flatbuffers::Offset<XNNPackSettings>(end);
970 return o;
971 }
972 };
973
974 inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(
975 flatbuffers::FlatBufferBuilder &_fbb,
976 int32_t num_threads = 0) {
977 XNNPackSettingsBuilder builder_(_fbb);
978 builder_.add_num_threads(num_threads);
979 return builder_.Finish();
980 }
981
982 flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
983
984 struct EdgeTpuDeviceSpecT : public flatbuffers::NativeTable {
985 typedef EdgeTpuDeviceSpec TableType;
986 tflite::EdgeTpuDeviceSpec_::PlatformType platform_type;
987 int32_t num_chips;
988 std::vector<std::string> device_paths;
989 int32_t chip_family;
EdgeTpuDeviceSpecTEdgeTpuDeviceSpecT990 EdgeTpuDeviceSpecT()
991 : platform_type(tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO),
992 num_chips(0),
993 chip_family(0) {
994 }
995 };
996
997 struct EdgeTpuDeviceSpec FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
998 typedef EdgeTpuDeviceSpecT NativeTableType;
999 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1000 VT_PLATFORM_TYPE = 4,
1001 VT_NUM_CHIPS = 6,
1002 VT_DEVICE_PATHS = 8,
1003 VT_CHIP_FAMILY = 10
1004 };
platform_typeFLATBUFFERS_FINAL_CLASS1005 tflite::EdgeTpuDeviceSpec_::PlatformType platform_type() const {
1006 return static_cast<tflite::EdgeTpuDeviceSpec_::PlatformType>(GetField<int32_t>(VT_PLATFORM_TYPE, 0));
1007 }
num_chipsFLATBUFFERS_FINAL_CLASS1008 int32_t num_chips() const {
1009 return GetField<int32_t>(VT_NUM_CHIPS, 0);
1010 }
device_pathsFLATBUFFERS_FINAL_CLASS1011 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *device_paths() const {
1012 return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DEVICE_PATHS);
1013 }
chip_familyFLATBUFFERS_FINAL_CLASS1014 int32_t chip_family() const {
1015 return GetField<int32_t>(VT_CHIP_FAMILY, 0);
1016 }
VerifyFLATBUFFERS_FINAL_CLASS1017 bool Verify(flatbuffers::Verifier &verifier) const {
1018 return VerifyTableStart(verifier) &&
1019 VerifyField<int32_t>(verifier, VT_PLATFORM_TYPE) &&
1020 VerifyField<int32_t>(verifier, VT_NUM_CHIPS) &&
1021 VerifyOffset(verifier, VT_DEVICE_PATHS) &&
1022 verifier.VerifyVector(device_paths()) &&
1023 verifier.VerifyVectorOfStrings(device_paths()) &&
1024 VerifyField<int32_t>(verifier, VT_CHIP_FAMILY) &&
1025 verifier.EndTable();
1026 }
1027 EdgeTpuDeviceSpecT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1028 void UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1029 static flatbuffers::Offset<EdgeTpuDeviceSpec> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1030 };
1031
1032 struct EdgeTpuDeviceSpecBuilder {
1033 flatbuffers::FlatBufferBuilder &fbb_;
1034 flatbuffers::uoffset_t start_;
add_platform_typeEdgeTpuDeviceSpecBuilder1035 void add_platform_type(tflite::EdgeTpuDeviceSpec_::PlatformType platform_type) {
1036 fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_PLATFORM_TYPE, static_cast<int32_t>(platform_type), 0);
1037 }
add_num_chipsEdgeTpuDeviceSpecBuilder1038 void add_num_chips(int32_t num_chips) {
1039 fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_NUM_CHIPS, num_chips, 0);
1040 }
add_device_pathsEdgeTpuDeviceSpecBuilder1041 void add_device_paths(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths) {
1042 fbb_.AddOffset(EdgeTpuDeviceSpec::VT_DEVICE_PATHS, device_paths);
1043 }
add_chip_familyEdgeTpuDeviceSpecBuilder1044 void add_chip_family(int32_t chip_family) {
1045 fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_CHIP_FAMILY, chip_family, 0);
1046 }
EdgeTpuDeviceSpecBuilderEdgeTpuDeviceSpecBuilder1047 explicit EdgeTpuDeviceSpecBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1048 : fbb_(_fbb) {
1049 start_ = fbb_.StartTable();
1050 }
1051 EdgeTpuDeviceSpecBuilder &operator=(const EdgeTpuDeviceSpecBuilder &);
FinishEdgeTpuDeviceSpecBuilder1052 flatbuffers::Offset<EdgeTpuDeviceSpec> Finish() {
1053 const auto end = fbb_.EndTable(start_);
1054 auto o = flatbuffers::Offset<EdgeTpuDeviceSpec>(end);
1055 return o;
1056 }
1057 };
1058
1059 inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(
1060 flatbuffers::FlatBufferBuilder &_fbb,
1061 tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
1062 int32_t num_chips = 0,
1063 flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths = 0,
1064 int32_t chip_family = 0) {
1065 EdgeTpuDeviceSpecBuilder builder_(_fbb);
1066 builder_.add_chip_family(chip_family);
1067 builder_.add_device_paths(device_paths);
1068 builder_.add_num_chips(num_chips);
1069 builder_.add_platform_type(platform_type);
1070 return builder_.Finish();
1071 }
1072
1073 inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpecDirect(
1074 flatbuffers::FlatBufferBuilder &_fbb,
1075 tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
1076 int32_t num_chips = 0,
1077 const std::vector<flatbuffers::Offset<flatbuffers::String>> *device_paths = nullptr,
1078 int32_t chip_family = 0) {
1079 auto device_paths__ = device_paths ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*device_paths) : 0;
1080 return tflite::CreateEdgeTpuDeviceSpec(
1081 _fbb,
1082 platform_type,
1083 num_chips,
1084 device_paths__,
1085 chip_family);
1086 }
1087
1088 flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1089
1090 struct EdgeTpuInactivePowerConfigT : public flatbuffers::NativeTable {
1091 typedef EdgeTpuInactivePowerConfig TableType;
1092 tflite::EdgeTpuPowerState inactive_power_state;
1093 int64_t inactive_timeout_us;
EdgeTpuInactivePowerConfigTEdgeTpuInactivePowerConfigT1094 EdgeTpuInactivePowerConfigT()
1095 : inactive_power_state(tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE),
1096 inactive_timeout_us(0) {
1097 }
1098 };
1099
1100 struct EdgeTpuInactivePowerConfig FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1101 typedef EdgeTpuInactivePowerConfigT NativeTableType;
1102 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1103 VT_INACTIVE_POWER_STATE = 4,
1104 VT_INACTIVE_TIMEOUT_US = 6
1105 };
inactive_power_stateFLATBUFFERS_FINAL_CLASS1106 tflite::EdgeTpuPowerState inactive_power_state() const {
1107 return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INACTIVE_POWER_STATE, 0));
1108 }
inactive_timeout_usFLATBUFFERS_FINAL_CLASS1109 int64_t inactive_timeout_us() const {
1110 return GetField<int64_t>(VT_INACTIVE_TIMEOUT_US, 0);
1111 }
VerifyFLATBUFFERS_FINAL_CLASS1112 bool Verify(flatbuffers::Verifier &verifier) const {
1113 return VerifyTableStart(verifier) &&
1114 VerifyField<int32_t>(verifier, VT_INACTIVE_POWER_STATE) &&
1115 VerifyField<int64_t>(verifier, VT_INACTIVE_TIMEOUT_US) &&
1116 verifier.EndTable();
1117 }
1118 EdgeTpuInactivePowerConfigT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1119 void UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1120 static flatbuffers::Offset<EdgeTpuInactivePowerConfig> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1121 };
1122
1123 struct EdgeTpuInactivePowerConfigBuilder {
1124 flatbuffers::FlatBufferBuilder &fbb_;
1125 flatbuffers::uoffset_t start_;
add_inactive_power_stateEdgeTpuInactivePowerConfigBuilder1126 void add_inactive_power_state(tflite::EdgeTpuPowerState inactive_power_state) {
1127 fbb_.AddElement<int32_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_POWER_STATE, static_cast<int32_t>(inactive_power_state), 0);
1128 }
add_inactive_timeout_usEdgeTpuInactivePowerConfigBuilder1129 void add_inactive_timeout_us(int64_t inactive_timeout_us) {
1130 fbb_.AddElement<int64_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_TIMEOUT_US, inactive_timeout_us, 0);
1131 }
EdgeTpuInactivePowerConfigBuilderEdgeTpuInactivePowerConfigBuilder1132 explicit EdgeTpuInactivePowerConfigBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1133 : fbb_(_fbb) {
1134 start_ = fbb_.StartTable();
1135 }
1136 EdgeTpuInactivePowerConfigBuilder &operator=(const EdgeTpuInactivePowerConfigBuilder &);
FinishEdgeTpuInactivePowerConfigBuilder1137 flatbuffers::Offset<EdgeTpuInactivePowerConfig> Finish() {
1138 const auto end = fbb_.EndTable(start_);
1139 auto o = flatbuffers::Offset<EdgeTpuInactivePowerConfig>(end);
1140 return o;
1141 }
1142 };
1143
1144 inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(
1145 flatbuffers::FlatBufferBuilder &_fbb,
1146 tflite::EdgeTpuPowerState inactive_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
1147 int64_t inactive_timeout_us = 0) {
1148 EdgeTpuInactivePowerConfigBuilder builder_(_fbb);
1149 builder_.add_inactive_timeout_us(inactive_timeout_us);
1150 builder_.add_inactive_power_state(inactive_power_state);
1151 return builder_.Finish();
1152 }
1153
1154 flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1155
1156 struct EdgeTpuSettingsT : public flatbuffers::NativeTable {
1157 typedef EdgeTpuSettings TableType;
1158 tflite::EdgeTpuPowerState inference_power_state;
1159 std::vector<std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>> inactive_power_configs;
1160 int32_t inference_priority;
1161 std::unique_ptr<tflite::EdgeTpuDeviceSpecT> edgetpu_device_spec;
EdgeTpuSettingsTEdgeTpuSettingsT1162 EdgeTpuSettingsT()
1163 : inference_power_state(tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE),
1164 inference_priority(-1) {
1165 }
1166 };
1167
1168 struct EdgeTpuSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1169 typedef EdgeTpuSettingsT NativeTableType;
1170 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1171 VT_INFERENCE_POWER_STATE = 4,
1172 VT_INACTIVE_POWER_CONFIGS = 6,
1173 VT_INFERENCE_PRIORITY = 8,
1174 VT_EDGETPU_DEVICE_SPEC = 10
1175 };
inference_power_stateFLATBUFFERS_FINAL_CLASS1176 tflite::EdgeTpuPowerState inference_power_state() const {
1177 return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INFERENCE_POWER_STATE, 0));
1178 }
inactive_power_configsFLATBUFFERS_FINAL_CLASS1179 const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs() const {
1180 return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *>(VT_INACTIVE_POWER_CONFIGS);
1181 }
inference_priorityFLATBUFFERS_FINAL_CLASS1182 int32_t inference_priority() const {
1183 return GetField<int32_t>(VT_INFERENCE_PRIORITY, -1);
1184 }
edgetpu_device_specFLATBUFFERS_FINAL_CLASS1185 const tflite::EdgeTpuDeviceSpec *edgetpu_device_spec() const {
1186 return GetPointer<const tflite::EdgeTpuDeviceSpec *>(VT_EDGETPU_DEVICE_SPEC);
1187 }
VerifyFLATBUFFERS_FINAL_CLASS1188 bool Verify(flatbuffers::Verifier &verifier) const {
1189 return VerifyTableStart(verifier) &&
1190 VerifyField<int32_t>(verifier, VT_INFERENCE_POWER_STATE) &&
1191 VerifyOffset(verifier, VT_INACTIVE_POWER_CONFIGS) &&
1192 verifier.VerifyVector(inactive_power_configs()) &&
1193 verifier.VerifyVectorOfTables(inactive_power_configs()) &&
1194 VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY) &&
1195 VerifyOffset(verifier, VT_EDGETPU_DEVICE_SPEC) &&
1196 verifier.VerifyTable(edgetpu_device_spec()) &&
1197 verifier.EndTable();
1198 }
1199 EdgeTpuSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1200 void UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1201 static flatbuffers::Offset<EdgeTpuSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1202 };
1203
1204 struct EdgeTpuSettingsBuilder {
1205 flatbuffers::FlatBufferBuilder &fbb_;
1206 flatbuffers::uoffset_t start_;
add_inference_power_stateEdgeTpuSettingsBuilder1207 void add_inference_power_state(tflite::EdgeTpuPowerState inference_power_state) {
1208 fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_POWER_STATE, static_cast<int32_t>(inference_power_state), 0);
1209 }
add_inactive_power_configsEdgeTpuSettingsBuilder1210 void add_inactive_power_configs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs) {
1211 fbb_.AddOffset(EdgeTpuSettings::VT_INACTIVE_POWER_CONFIGS, inactive_power_configs);
1212 }
add_inference_priorityEdgeTpuSettingsBuilder1213 void add_inference_priority(int32_t inference_priority) {
1214 fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_PRIORITY, inference_priority, -1);
1215 }
add_edgetpu_device_specEdgeTpuSettingsBuilder1216 void add_edgetpu_device_spec(flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec) {
1217 fbb_.AddOffset(EdgeTpuSettings::VT_EDGETPU_DEVICE_SPEC, edgetpu_device_spec);
1218 }
EdgeTpuSettingsBuilderEdgeTpuSettingsBuilder1219 explicit EdgeTpuSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1220 : fbb_(_fbb) {
1221 start_ = fbb_.StartTable();
1222 }
1223 EdgeTpuSettingsBuilder &operator=(const EdgeTpuSettingsBuilder &);
FinishEdgeTpuSettingsBuilder1224 flatbuffers::Offset<EdgeTpuSettings> Finish() {
1225 const auto end = fbb_.EndTable(start_);
1226 auto o = flatbuffers::Offset<EdgeTpuSettings>(end);
1227 return o;
1228 }
1229 };
1230
1231 inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(
1232 flatbuffers::FlatBufferBuilder &_fbb,
1233 tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
1234 flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs = 0,
1235 int32_t inference_priority = -1,
1236 flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0) {
1237 EdgeTpuSettingsBuilder builder_(_fbb);
1238 builder_.add_edgetpu_device_spec(edgetpu_device_spec);
1239 builder_.add_inference_priority(inference_priority);
1240 builder_.add_inactive_power_configs(inactive_power_configs);
1241 builder_.add_inference_power_state(inference_power_state);
1242 return builder_.Finish();
1243 }
1244
1245 inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettingsDirect(
1246 flatbuffers::FlatBufferBuilder &_fbb,
1247 tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
1248 const std::vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs = nullptr,
1249 int32_t inference_priority = -1,
1250 flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0) {
1251 auto inactive_power_configs__ = inactive_power_configs ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>(*inactive_power_configs) : 0;
1252 return tflite::CreateEdgeTpuSettings(
1253 _fbb,
1254 inference_power_state,
1255 inactive_power_configs__,
1256 inference_priority,
1257 edgetpu_device_spec);
1258 }
1259
1260 flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1261
1262 struct CoralSettingsT : public flatbuffers::NativeTable {
1263 typedef CoralSettings TableType;
1264 std::string device;
1265 tflite::CoralSettings_::Performance performance;
1266 bool usb_always_dfu;
1267 int32_t usb_max_bulk_in_queue_length;
1268 CoralSettingsT()
1269 : performance(tflite::CoralSettings_::Performance_UNDEFINED),
1270 usb_always_dfu(false),
1271 usb_max_bulk_in_queue_length(0) {
1272 }
1273 };
1274
1275 struct CoralSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1276 typedef CoralSettingsT NativeTableType;
1277 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1278 VT_DEVICE = 4,
1279 VT_PERFORMANCE = 6,
1280 VT_USB_ALWAYS_DFU = 8,
1281 VT_USB_MAX_BULK_IN_QUEUE_LENGTH = 10
1282 };
1283 const flatbuffers::String *device() const {
1284 return GetPointer<const flatbuffers::String *>(VT_DEVICE);
1285 }
1286 tflite::CoralSettings_::Performance performance() const {
1287 return static_cast<tflite::CoralSettings_::Performance>(GetField<int32_t>(VT_PERFORMANCE, 0));
1288 }
1289 bool usb_always_dfu() const {
1290 return GetField<uint8_t>(VT_USB_ALWAYS_DFU, 0) != 0;
1291 }
1292 int32_t usb_max_bulk_in_queue_length() const {
1293 return GetField<int32_t>(VT_USB_MAX_BULK_IN_QUEUE_LENGTH, 0);
1294 }
1295 bool Verify(flatbuffers::Verifier &verifier) const {
1296 return VerifyTableStart(verifier) &&
1297 VerifyOffset(verifier, VT_DEVICE) &&
1298 verifier.VerifyString(device()) &&
1299 VerifyField<int32_t>(verifier, VT_PERFORMANCE) &&
1300 VerifyField<uint8_t>(verifier, VT_USB_ALWAYS_DFU) &&
1301 VerifyField<int32_t>(verifier, VT_USB_MAX_BULK_IN_QUEUE_LENGTH) &&
1302 verifier.EndTable();
1303 }
1304 CoralSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1305 void UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1306 static flatbuffers::Offset<CoralSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1307 };
1308
1309 struct CoralSettingsBuilder {
1310 flatbuffers::FlatBufferBuilder &fbb_;
1311 flatbuffers::uoffset_t start_;
1312 void add_device(flatbuffers::Offset<flatbuffers::String> device) {
1313 fbb_.AddOffset(CoralSettings::VT_DEVICE, device);
1314 }
1315 void add_performance(tflite::CoralSettings_::Performance performance) {
1316 fbb_.AddElement<int32_t>(CoralSettings::VT_PERFORMANCE, static_cast<int32_t>(performance), 0);
1317 }
1318 void add_usb_always_dfu(bool usb_always_dfu) {
1319 fbb_.AddElement<uint8_t>(CoralSettings::VT_USB_ALWAYS_DFU, static_cast<uint8_t>(usb_always_dfu), 0);
1320 }
1321 void add_usb_max_bulk_in_queue_length(int32_t usb_max_bulk_in_queue_length) {
1322 fbb_.AddElement<int32_t>(CoralSettings::VT_USB_MAX_BULK_IN_QUEUE_LENGTH, usb_max_bulk_in_queue_length, 0);
1323 }
1324 explicit CoralSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1325 : fbb_(_fbb) {
1326 start_ = fbb_.StartTable();
1327 }
1328 CoralSettingsBuilder &operator=(const CoralSettingsBuilder &);
1329 flatbuffers::Offset<CoralSettings> Finish() {
1330 const auto end = fbb_.EndTable(start_);
1331 auto o = flatbuffers::Offset<CoralSettings>(end);
1332 return o;
1333 }
1334 };
1335
1336 inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(
1337 flatbuffers::FlatBufferBuilder &_fbb,
1338 flatbuffers::Offset<flatbuffers::String> device = 0,
1339 tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
1340 bool usb_always_dfu = false,
1341 int32_t usb_max_bulk_in_queue_length = 0) {
1342 CoralSettingsBuilder builder_(_fbb);
1343 builder_.add_usb_max_bulk_in_queue_length(usb_max_bulk_in_queue_length);
1344 builder_.add_performance(performance);
1345 builder_.add_device(device);
1346 builder_.add_usb_always_dfu(usb_always_dfu);
1347 return builder_.Finish();
1348 }
1349
1350 inline flatbuffers::Offset<CoralSettings> CreateCoralSettingsDirect(
1351 flatbuffers::FlatBufferBuilder &_fbb,
1352 const char *device = nullptr,
1353 tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
1354 bool usb_always_dfu = false,
1355 int32_t usb_max_bulk_in_queue_length = 0) {
1356 auto device__ = device ? _fbb.CreateString(device) : 0;
1357 return tflite::CreateCoralSettings(
1358 _fbb,
1359 device__,
1360 performance,
1361 usb_always_dfu,
1362 usb_max_bulk_in_queue_length);
1363 }
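// Editor's note (not generated code): a minimal usage sketch for the two
// CoralSettings helpers above. CreateCoralSettingsDirect copies plain C
// strings into the buffer for you, while CreateCoralSettings expects
// pre-built offsets. The builder name `fbb` and the queue-length value are
// illustrative assumptions only.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   auto coral = tflite::CreateCoralSettingsDirect(
//       fbb, /*device=*/"usb", tflite::CoralSettings_::Performance_UNDEFINED,
//       /*usb_always_dfu=*/false, /*usb_max_bulk_in_queue_length=*/32);
//   fbb.Finish(coral);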
1364
1365 flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1366
1367 struct CPUSettingsT : public flatbuffers::NativeTable {
1368 typedef CPUSettings TableType;
1369 int32_t num_threads;
1370 CPUSettingsT()
1371 : num_threads(0) {
1372 }
1373 };
1374
1375 struct CPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1376 typedef CPUSettingsT NativeTableType;
1377 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1378 VT_NUM_THREADS = 4
1379 };
1380 int32_t num_threads() const {
1381 return GetField<int32_t>(VT_NUM_THREADS, 0);
1382 }
1383 bool Verify(flatbuffers::Verifier &verifier) const {
1384 return VerifyTableStart(verifier) &&
1385 VerifyField<int32_t>(verifier, VT_NUM_THREADS) &&
1386 verifier.EndTable();
1387 }
1388 CPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1389 void UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1390 static flatbuffers::Offset<CPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1391 };
1392
1393 struct CPUSettingsBuilder {
1394 flatbuffers::FlatBufferBuilder &fbb_;
1395 flatbuffers::uoffset_t start_;
1396 void add_num_threads(int32_t num_threads) {
1397 fbb_.AddElement<int32_t>(CPUSettings::VT_NUM_THREADS, num_threads, 0);
1398 }
1399 explicit CPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1400 : fbb_(_fbb) {
1401 start_ = fbb_.StartTable();
1402 }
1403 CPUSettingsBuilder &operator=(const CPUSettingsBuilder &);
1404 flatbuffers::Offset<CPUSettings> Finish() {
1405 const auto end = fbb_.EndTable(start_);
1406 auto o = flatbuffers::Offset<CPUSettings>(end);
1407 return o;
1408 }
1409 };
1410
1411 inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(
1412 flatbuffers::FlatBufferBuilder &_fbb,
1413 int32_t num_threads = 0) {
1414 CPUSettingsBuilder builder_(_fbb);
1415 builder_.add_num_threads(num_threads);
1416 return builder_.Finish();
1417 }
1418
1419 flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1420
1421 struct TFLiteSettingsT : public flatbuffers::NativeTable {
1422 typedef TFLiteSettings TableType;
1423 tflite::Delegate delegate;
1424 std::unique_ptr<tflite::NNAPISettingsT> nnapi_settings;
1425 std::unique_ptr<tflite::GPUSettingsT> gpu_settings;
1426 std::unique_ptr<tflite::HexagonSettingsT> hexagon_settings;
1427 std::unique_ptr<tflite::XNNPackSettingsT> xnnpack_settings;
1428 std::unique_ptr<tflite::CPUSettingsT> cpu_settings;
1429 int32_t max_delegated_partitions;
1430 std::unique_ptr<tflite::EdgeTpuSettingsT> edgetpu_settings;
1431 std::unique_ptr<tflite::CoralSettingsT> coral_settings;
1432 std::unique_ptr<tflite::FallbackSettingsT> fallback_settings;
1433 TFLiteSettingsT()
1434 : delegate(tflite::Delegate_NONE),
1435 max_delegated_partitions(0) {
1436 }
1437 };
1438
1439 struct TFLiteSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1440 typedef TFLiteSettingsT NativeTableType;
1441 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1442 VT_DELEGATE = 4,
1443 VT_NNAPI_SETTINGS = 6,
1444 VT_GPU_SETTINGS = 8,
1445 VT_HEXAGON_SETTINGS = 10,
1446 VT_XNNPACK_SETTINGS = 12,
1447 VT_CPU_SETTINGS = 14,
1448 VT_MAX_DELEGATED_PARTITIONS = 16,
1449 VT_EDGETPU_SETTINGS = 18,
1450 VT_CORAL_SETTINGS = 20,
1451 VT_FALLBACK_SETTINGS = 22
1452 };
1453 tflite::Delegate delegate() const {
1454 return static_cast<tflite::Delegate>(GetField<int32_t>(VT_DELEGATE, 0));
1455 }
1456 const tflite::NNAPISettings *nnapi_settings() const {
1457 return GetPointer<const tflite::NNAPISettings *>(VT_NNAPI_SETTINGS);
1458 }
1459 const tflite::GPUSettings *gpu_settings() const {
1460 return GetPointer<const tflite::GPUSettings *>(VT_GPU_SETTINGS);
1461 }
1462 const tflite::HexagonSettings *hexagon_settings() const {
1463 return GetPointer<const tflite::HexagonSettings *>(VT_HEXAGON_SETTINGS);
1464 }
1465 const tflite::XNNPackSettings *xnnpack_settings() const {
1466 return GetPointer<const tflite::XNNPackSettings *>(VT_XNNPACK_SETTINGS);
1467 }
1468 const tflite::CPUSettings *cpu_settings() const {
1469 return GetPointer<const tflite::CPUSettings *>(VT_CPU_SETTINGS);
1470 }
1471 int32_t max_delegated_partitions() const {
1472 return GetField<int32_t>(VT_MAX_DELEGATED_PARTITIONS, 0);
1473 }
1474 const tflite::EdgeTpuSettings *edgetpu_settings() const {
1475 return GetPointer<const tflite::EdgeTpuSettings *>(VT_EDGETPU_SETTINGS);
1476 }
1477 const tflite::CoralSettings *coral_settings() const {
1478 return GetPointer<const tflite::CoralSettings *>(VT_CORAL_SETTINGS);
1479 }
1480 const tflite::FallbackSettings *fallback_settings() const {
1481 return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
1482 }
1483 bool Verify(flatbuffers::Verifier &verifier) const {
1484 return VerifyTableStart(verifier) &&
1485 VerifyField<int32_t>(verifier, VT_DELEGATE) &&
1486 VerifyOffset(verifier, VT_NNAPI_SETTINGS) &&
1487 verifier.VerifyTable(nnapi_settings()) &&
1488 VerifyOffset(verifier, VT_GPU_SETTINGS) &&
1489 verifier.VerifyTable(gpu_settings()) &&
1490 VerifyOffset(verifier, VT_HEXAGON_SETTINGS) &&
1491 verifier.VerifyTable(hexagon_settings()) &&
1492 VerifyOffset(verifier, VT_XNNPACK_SETTINGS) &&
1493 verifier.VerifyTable(xnnpack_settings()) &&
1494 VerifyOffset(verifier, VT_CPU_SETTINGS) &&
1495 verifier.VerifyTable(cpu_settings()) &&
1496 VerifyField<int32_t>(verifier, VT_MAX_DELEGATED_PARTITIONS) &&
1497 VerifyOffset(verifier, VT_EDGETPU_SETTINGS) &&
1498 verifier.VerifyTable(edgetpu_settings()) &&
1499 VerifyOffset(verifier, VT_CORAL_SETTINGS) &&
1500 verifier.VerifyTable(coral_settings()) &&
1501 VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
1502 verifier.VerifyTable(fallback_settings()) &&
1503 verifier.EndTable();
1504 }
1505 TFLiteSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1506 void UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1507 static flatbuffers::Offset<TFLiteSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1508 };
1509
1510 struct TFLiteSettingsBuilder {
1511 flatbuffers::FlatBufferBuilder &fbb_;
1512 flatbuffers::uoffset_t start_;
1513 void add_delegate(tflite::Delegate delegate) {
1514 fbb_.AddElement<int32_t>(TFLiteSettings::VT_DELEGATE, static_cast<int32_t>(delegate), 0);
1515 }
1516 void add_nnapi_settings(flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings) {
1517 fbb_.AddOffset(TFLiteSettings::VT_NNAPI_SETTINGS, nnapi_settings);
1518 }
1519 void add_gpu_settings(flatbuffers::Offset<tflite::GPUSettings> gpu_settings) {
1520 fbb_.AddOffset(TFLiteSettings::VT_GPU_SETTINGS, gpu_settings);
1521 }
1522 void add_hexagon_settings(flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings) {
1523 fbb_.AddOffset(TFLiteSettings::VT_HEXAGON_SETTINGS, hexagon_settings);
1524 }
1525 void add_xnnpack_settings(flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings) {
1526 fbb_.AddOffset(TFLiteSettings::VT_XNNPACK_SETTINGS, xnnpack_settings);
1527 }
1528 void add_cpu_settings(flatbuffers::Offset<tflite::CPUSettings> cpu_settings) {
1529 fbb_.AddOffset(TFLiteSettings::VT_CPU_SETTINGS, cpu_settings);
1530 }
1531 void add_max_delegated_partitions(int32_t max_delegated_partitions) {
1532 fbb_.AddElement<int32_t>(TFLiteSettings::VT_MAX_DELEGATED_PARTITIONS, max_delegated_partitions, 0);
1533 }
1534 void add_edgetpu_settings(flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings) {
1535 fbb_.AddOffset(TFLiteSettings::VT_EDGETPU_SETTINGS, edgetpu_settings);
1536 }
1537 void add_coral_settings(flatbuffers::Offset<tflite::CoralSettings> coral_settings) {
1538 fbb_.AddOffset(TFLiteSettings::VT_CORAL_SETTINGS, coral_settings);
1539 }
1540 void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
1541 fbb_.AddOffset(TFLiteSettings::VT_FALLBACK_SETTINGS, fallback_settings);
1542 }
1543 explicit TFLiteSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1544 : fbb_(_fbb) {
1545 start_ = fbb_.StartTable();
1546 }
1547 TFLiteSettingsBuilder &operator=(const TFLiteSettingsBuilder &);
1548 flatbuffers::Offset<TFLiteSettings> Finish() {
1549 const auto end = fbb_.EndTable(start_);
1550 auto o = flatbuffers::Offset<TFLiteSettings>(end);
1551 return o;
1552 }
1553 };
1554
1555 inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(
1556 flatbuffers::FlatBufferBuilder &_fbb,
1557 tflite::Delegate delegate = tflite::Delegate_NONE,
1558 flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings = 0,
1559 flatbuffers::Offset<tflite::GPUSettings> gpu_settings = 0,
1560 flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings = 0,
1561 flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings = 0,
1562 flatbuffers::Offset<tflite::CPUSettings> cpu_settings = 0,
1563 int32_t max_delegated_partitions = 0,
1564 flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings = 0,
1565 flatbuffers::Offset<tflite::CoralSettings> coral_settings = 0,
1566 flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0) {
1567 TFLiteSettingsBuilder builder_(_fbb);
1568 builder_.add_fallback_settings(fallback_settings);
1569 builder_.add_coral_settings(coral_settings);
1570 builder_.add_edgetpu_settings(edgetpu_settings);
1571 builder_.add_max_delegated_partitions(max_delegated_partitions);
1572 builder_.add_cpu_settings(cpu_settings);
1573 builder_.add_xnnpack_settings(xnnpack_settings);
1574 builder_.add_hexagon_settings(hexagon_settings);
1575 builder_.add_gpu_settings(gpu_settings);
1576 builder_.add_nnapi_settings(nnapi_settings);
1577 builder_.add_delegate(delegate);
1578 return builder_.Finish();
1579 }
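// Editor's note (not generated code): a hedged sketch of composing a
// TFLiteSettings table with the helpers above. It selects the XNNPACK
// delegate and attaches an XNNPackSettings sub-table; child tables must be
// created on the same FlatBufferBuilder before CreateTFLiteSettings runs.
// The builder name `fbb` and the thread count are assumptions.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   auto xnnpack = tflite::CreateXNNPackSettings(fbb, /*num_threads=*/4);
//   auto settings = tflite::CreateTFLiteSettings(
//       fbb, tflite::Delegate_XNNPACK,
//       /*nnapi_settings=*/0, /*gpu_settings=*/0, /*hexagon_settings=*/0,
//       /*xnnpack_settings=*/xnnpack);
//   fbb.Finish(settings);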
1580
1581 flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1582
1583 struct FallbackSettingsT : public flatbuffers::NativeTable {
1584 typedef FallbackSettings TableType;
1585 bool allow_automatic_fallback_on_compilation_error;
1586 bool allow_automatic_fallback_on_execution_error;
1587 FallbackSettingsT()
1588 : allow_automatic_fallback_on_compilation_error(false),
1589 allow_automatic_fallback_on_execution_error(false) {
1590 }
1591 };
1592
1593 struct FallbackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1594 typedef FallbackSettingsT NativeTableType;
1595 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1596 VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR = 4,
1597 VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR = 6
1598 };
1599 bool allow_automatic_fallback_on_compilation_error() const {
1600 return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, 0) != 0;
1601 }
1602 bool allow_automatic_fallback_on_execution_error() const {
1603 return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, 0) != 0;
1604 }
1605 bool Verify(flatbuffers::Verifier &verifier) const {
1606 return VerifyTableStart(verifier) &&
1607 VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR) &&
1608 VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR) &&
1609 verifier.EndTable();
1610 }
1611 FallbackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1612 void UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1613 static flatbuffers::Offset<FallbackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1614 };
1615
1616 struct FallbackSettingsBuilder {
1617 flatbuffers::FlatBufferBuilder &fbb_;
1618 flatbuffers::uoffset_t start_;
1619 void add_allow_automatic_fallback_on_compilation_error(bool allow_automatic_fallback_on_compilation_error) {
1620 fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_compilation_error), 0);
1621 }
1622 void add_allow_automatic_fallback_on_execution_error(bool allow_automatic_fallback_on_execution_error) {
1623 fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_execution_error), 0);
1624 }
1625 explicit FallbackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1626 : fbb_(_fbb) {
1627 start_ = fbb_.StartTable();
1628 }
1629 FallbackSettingsBuilder &operator=(const FallbackSettingsBuilder &);
1630 flatbuffers::Offset<FallbackSettings> Finish() {
1631 const auto end = fbb_.EndTable(start_);
1632 auto o = flatbuffers::Offset<FallbackSettings>(end);
1633 return o;
1634 }
1635 };
1636
1637 inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(
1638 flatbuffers::FlatBufferBuilder &_fbb,
1639 bool allow_automatic_fallback_on_compilation_error = false,
1640 bool allow_automatic_fallback_on_execution_error = false) {
1641 FallbackSettingsBuilder builder_(_fbb);
1642 builder_.add_allow_automatic_fallback_on_execution_error(allow_automatic_fallback_on_execution_error);
1643 builder_.add_allow_automatic_fallback_on_compilation_error(allow_automatic_fallback_on_compilation_error);
1644 return builder_.Finish();
1645 }
1646
1647 flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1648
1649 struct BenchmarkMetricT : public flatbuffers::NativeTable {
1650 typedef BenchmarkMetric TableType;
1651 std::string name;
1652 std::vector<float> values;
1653 BenchmarkMetricT() {
1654 }
1655 };
1656
1657 struct BenchmarkMetric FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1658 typedef BenchmarkMetricT NativeTableType;
1659 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1660 VT_NAME = 4,
1661 VT_VALUES = 6
1662 };
1663 const flatbuffers::String *name() const {
1664 return GetPointer<const flatbuffers::String *>(VT_NAME);
1665 }
1666 const flatbuffers::Vector<float> *values() const {
1667 return GetPointer<const flatbuffers::Vector<float> *>(VT_VALUES);
1668 }
1669 bool Verify(flatbuffers::Verifier &verifier) const {
1670 return VerifyTableStart(verifier) &&
1671 VerifyOffset(verifier, VT_NAME) &&
1672 verifier.VerifyString(name()) &&
1673 VerifyOffset(verifier, VT_VALUES) &&
1674 verifier.VerifyVector(values()) &&
1675 verifier.EndTable();
1676 }
1677 BenchmarkMetricT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1678 void UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1679 static flatbuffers::Offset<BenchmarkMetric> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1680 };
1681
1682 struct BenchmarkMetricBuilder {
1683 flatbuffers::FlatBufferBuilder &fbb_;
1684 flatbuffers::uoffset_t start_;
1685 void add_name(flatbuffers::Offset<flatbuffers::String> name) {
1686 fbb_.AddOffset(BenchmarkMetric::VT_NAME, name);
1687 }
1688 void add_values(flatbuffers::Offset<flatbuffers::Vector<float>> values) {
1689 fbb_.AddOffset(BenchmarkMetric::VT_VALUES, values);
1690 }
1691 explicit BenchmarkMetricBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1692 : fbb_(_fbb) {
1693 start_ = fbb_.StartTable();
1694 }
1695 BenchmarkMetricBuilder &operator=(const BenchmarkMetricBuilder &);
1696 flatbuffers::Offset<BenchmarkMetric> Finish() {
1697 const auto end = fbb_.EndTable(start_);
1698 auto o = flatbuffers::Offset<BenchmarkMetric>(end);
1699 return o;
1700 }
1701 };
1702
1703 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(
1704 flatbuffers::FlatBufferBuilder &_fbb,
1705 flatbuffers::Offset<flatbuffers::String> name = 0,
1706 flatbuffers::Offset<flatbuffers::Vector<float>> values = 0) {
1707 BenchmarkMetricBuilder builder_(_fbb);
1708 builder_.add_values(values);
1709 builder_.add_name(name);
1710 return builder_.Finish();
1711 }
1712
1713 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetricDirect(
1714 flatbuffers::FlatBufferBuilder &_fbb,
1715 const char *name = nullptr,
1716 const std::vector<float> *values = nullptr) {
1717 auto name__ = name ? _fbb.CreateString(name) : 0;
1718 auto values__ = values ? _fbb.CreateVector<float>(*values) : 0;
1719 return tflite::CreateBenchmarkMetric(
1720 _fbb,
1721 name__,
1722 values__);
1723 }
1724
1725 flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1726
1727 struct BenchmarkResultT : public flatbuffers::NativeTable {
1728 typedef BenchmarkResult TableType;
1729 std::vector<int64_t> initialization_time_us;
1730 std::vector<int64_t> inference_time_us;
1731 int32_t max_memory_kb;
1732 bool ok;
1733 std::vector<std::unique_ptr<tflite::BenchmarkMetricT>> metrics;
1734 BenchmarkResultT()
1735 : max_memory_kb(0),
1736 ok(false) {
1737 }
1738 };
1739
1740 struct BenchmarkResult FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1741 typedef BenchmarkResultT NativeTableType;
1742 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1743 VT_INITIALIZATION_TIME_US = 4,
1744 VT_INFERENCE_TIME_US = 6,
1745 VT_MAX_MEMORY_KB = 8,
1746 VT_OK = 10,
1747 VT_METRICS = 12
1748 };
1749 const flatbuffers::Vector<int64_t> *initialization_time_us() const {
1750 return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INITIALIZATION_TIME_US);
1751 }
1752 const flatbuffers::Vector<int64_t> *inference_time_us() const {
1753 return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INFERENCE_TIME_US);
1754 }
1755 int32_t max_memory_kb() const {
1756 return GetField<int32_t>(VT_MAX_MEMORY_KB, 0);
1757 }
1758 bool ok() const {
1759 return GetField<uint8_t>(VT_OK, 0) != 0;
1760 }
1761 const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics() const {
1762 return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *>(VT_METRICS);
1763 }
1764 bool Verify(flatbuffers::Verifier &verifier) const {
1765 return VerifyTableStart(verifier) &&
1766 VerifyOffset(verifier, VT_INITIALIZATION_TIME_US) &&
1767 verifier.VerifyVector(initialization_time_us()) &&
1768 VerifyOffset(verifier, VT_INFERENCE_TIME_US) &&
1769 verifier.VerifyVector(inference_time_us()) &&
1770 VerifyField<int32_t>(verifier, VT_MAX_MEMORY_KB) &&
1771 VerifyField<uint8_t>(verifier, VT_OK) &&
1772 VerifyOffset(verifier, VT_METRICS) &&
1773 verifier.VerifyVector(metrics()) &&
1774 verifier.VerifyVectorOfTables(metrics()) &&
1775 verifier.EndTable();
1776 }
1777 BenchmarkResultT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1778 void UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1779 static flatbuffers::Offset<BenchmarkResult> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1780 };
1781
1782 struct BenchmarkResultBuilder {
1783 flatbuffers::FlatBufferBuilder &fbb_;
1784 flatbuffers::uoffset_t start_;
1785 void add_initialization_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us) {
1786 fbb_.AddOffset(BenchmarkResult::VT_INITIALIZATION_TIME_US, initialization_time_us);
1787 }
1788 void add_inference_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us) {
1789 fbb_.AddOffset(BenchmarkResult::VT_INFERENCE_TIME_US, inference_time_us);
1790 }
1791 void add_max_memory_kb(int32_t max_memory_kb) {
1792 fbb_.AddElement<int32_t>(BenchmarkResult::VT_MAX_MEMORY_KB, max_memory_kb, 0);
1793 }
1794 void add_ok(bool ok) {
1795 fbb_.AddElement<uint8_t>(BenchmarkResult::VT_OK, static_cast<uint8_t>(ok), 0);
1796 }
1797 void add_metrics(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics) {
1798 fbb_.AddOffset(BenchmarkResult::VT_METRICS, metrics);
1799 }
1800 explicit BenchmarkResultBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1801 : fbb_(_fbb) {
1802 start_ = fbb_.StartTable();
1803 }
1804 BenchmarkResultBuilder &operator=(const BenchmarkResultBuilder &);
1805 flatbuffers::Offset<BenchmarkResult> Finish() {
1806 const auto end = fbb_.EndTable(start_);
1807 auto o = flatbuffers::Offset<BenchmarkResult>(end);
1808 return o;
1809 }
1810 };
1811
1812 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(
1813 flatbuffers::FlatBufferBuilder &_fbb,
1814 flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us = 0,
1815 flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us = 0,
1816 int32_t max_memory_kb = 0,
1817 bool ok = false,
1818 flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics = 0) {
1819 BenchmarkResultBuilder builder_(_fbb);
1820 builder_.add_metrics(metrics);
1821 builder_.add_max_memory_kb(max_memory_kb);
1822 builder_.add_inference_time_us(inference_time_us);
1823 builder_.add_initialization_time_us(initialization_time_us);
1824 builder_.add_ok(ok);
1825 return builder_.Finish();
1826 }
1827
1828 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResultDirect(
1829 flatbuffers::FlatBufferBuilder &_fbb,
1830 const std::vector<int64_t> *initialization_time_us = nullptr,
1831 const std::vector<int64_t> *inference_time_us = nullptr,
1832 int32_t max_memory_kb = 0,
1833 bool ok = false,
1834 const std::vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics = nullptr) {
1835 auto initialization_time_us__ = initialization_time_us ? _fbb.CreateVector<int64_t>(*initialization_time_us) : 0;
1836 auto inference_time_us__ = inference_time_us ? _fbb.CreateVector<int64_t>(*inference_time_us) : 0;
1837 auto metrics__ = metrics ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>>(*metrics) : 0;
1838 return tflite::CreateBenchmarkResult(
1839 _fbb,
1840 initialization_time_us__,
1841 inference_time_us__,
1842 max_memory_kb,
1843 ok,
1844 metrics__);
1845 }
1846
1847 flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1848
1849 struct ErrorCodeT : public flatbuffers::NativeTable {
1850 typedef ErrorCode TableType;
1851 tflite::Delegate source;
1852 int32_t tflite_error;
1853 int64_t underlying_api_error;
1854 ErrorCodeT()
1855 : source(tflite::Delegate_NONE),
1856 tflite_error(0),
1857 underlying_api_error(0) {
1858 }
1859 };
1860
1861 struct ErrorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1862 typedef ErrorCodeT NativeTableType;
1863 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1864 VT_SOURCE = 4,
1865 VT_TFLITE_ERROR = 6,
1866 VT_UNDERLYING_API_ERROR = 8
1867 };
1868 tflite::Delegate source() const {
1869 return static_cast<tflite::Delegate>(GetField<int32_t>(VT_SOURCE, 0));
1870 }
1871 int32_t tflite_error() const {
1872 return GetField<int32_t>(VT_TFLITE_ERROR, 0);
1873 }
1874 int64_t underlying_api_error() const {
1875 return GetField<int64_t>(VT_UNDERLYING_API_ERROR, 0);
1876 }
1877 bool Verify(flatbuffers::Verifier &verifier) const {
1878 return VerifyTableStart(verifier) &&
1879 VerifyField<int32_t>(verifier, VT_SOURCE) &&
1880 VerifyField<int32_t>(verifier, VT_TFLITE_ERROR) &&
1881 VerifyField<int64_t>(verifier, VT_UNDERLYING_API_ERROR) &&
1882 verifier.EndTable();
1883 }
1884 ErrorCodeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1885 void UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1886 static flatbuffers::Offset<ErrorCode> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1887 };
1888
1889 struct ErrorCodeBuilder {
1890 flatbuffers::FlatBufferBuilder &fbb_;
1891 flatbuffers::uoffset_t start_;
1892 void add_source(tflite::Delegate source) {
1893 fbb_.AddElement<int32_t>(ErrorCode::VT_SOURCE, static_cast<int32_t>(source), 0);
1894 }
1895 void add_tflite_error(int32_t tflite_error) {
1896 fbb_.AddElement<int32_t>(ErrorCode::VT_TFLITE_ERROR, tflite_error, 0);
1897 }
1898 void add_underlying_api_error(int64_t underlying_api_error) {
1899 fbb_.AddElement<int64_t>(ErrorCode::VT_UNDERLYING_API_ERROR, underlying_api_error, 0);
1900 }
1901 explicit ErrorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1902 : fbb_(_fbb) {
1903 start_ = fbb_.StartTable();
1904 }
1905 ErrorCodeBuilder &operator=(const ErrorCodeBuilder &);
1906 flatbuffers::Offset<ErrorCode> Finish() {
1907 const auto end = fbb_.EndTable(start_);
1908 auto o = flatbuffers::Offset<ErrorCode>(end);
1909 return o;
1910 }
1911 };
1912
1913 inline flatbuffers::Offset<ErrorCode> CreateErrorCode(
1914 flatbuffers::FlatBufferBuilder &_fbb,
1915 tflite::Delegate source = tflite::Delegate_NONE,
1916 int32_t tflite_error = 0,
1917 int64_t underlying_api_error = 0) {
1918 ErrorCodeBuilder builder_(_fbb);
1919 builder_.add_underlying_api_error(underlying_api_error);
1920 builder_.add_tflite_error(tflite_error);
1921 builder_.add_source(source);
1922 return builder_.Finish();
1923 }
1924
1925 flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1926
1927 struct BenchmarkErrorT : public flatbuffers::NativeTable {
1928 typedef BenchmarkError TableType;
1929 tflite::BenchmarkStage stage;
1930 int32_t exit_code;
1931 int32_t signal;
1932 std::vector<std::unique_ptr<tflite::ErrorCodeT>> error_code;
1933 BenchmarkErrorT()
1934 : stage(tflite::BenchmarkStage_UNKNOWN),
1935 exit_code(0),
1936 signal(0) {
1937 }
1938 };
1939
1940 struct BenchmarkError FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
1941 typedef BenchmarkErrorT NativeTableType;
1942 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
1943 VT_STAGE = 4,
1944 VT_EXIT_CODE = 6,
1945 VT_SIGNAL = 8,
1946 VT_ERROR_CODE = 10
1947 };
1948 tflite::BenchmarkStage stage() const {
1949 return static_cast<tflite::BenchmarkStage>(GetField<int32_t>(VT_STAGE, 0));
1950 }
1951 int32_t exit_code() const {
1952 return GetField<int32_t>(VT_EXIT_CODE, 0);
1953 }
1954 int32_t signal() const {
1955 return GetField<int32_t>(VT_SIGNAL, 0);
1956 }
1957 const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code() const {
1958 return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *>(VT_ERROR_CODE);
1959 }
1960 bool Verify(flatbuffers::Verifier &verifier) const {
1961 return VerifyTableStart(verifier) &&
1962 VerifyField<int32_t>(verifier, VT_STAGE) &&
1963 VerifyField<int32_t>(verifier, VT_EXIT_CODE) &&
1964 VerifyField<int32_t>(verifier, VT_SIGNAL) &&
1965 VerifyOffset(verifier, VT_ERROR_CODE) &&
1966 verifier.VerifyVector(error_code()) &&
1967 verifier.VerifyVectorOfTables(error_code()) &&
1968 verifier.EndTable();
1969 }
1970 BenchmarkErrorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1971 void UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
1972 static flatbuffers::Offset<BenchmarkError> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1973 };
1974
1975 struct BenchmarkErrorBuilder {
1976 flatbuffers::FlatBufferBuilder &fbb_;
1977 flatbuffers::uoffset_t start_;
1978 void add_stage(tflite::BenchmarkStage stage) {
1979 fbb_.AddElement<int32_t>(BenchmarkError::VT_STAGE, static_cast<int32_t>(stage), 0);
1980 }
1981 void add_exit_code(int32_t exit_code) {
1982 fbb_.AddElement<int32_t>(BenchmarkError::VT_EXIT_CODE, exit_code, 0);
1983 }
1984 void add_signal(int32_t signal) {
1985 fbb_.AddElement<int32_t>(BenchmarkError::VT_SIGNAL, signal, 0);
1986 }
1987 void add_error_code(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code) {
1988 fbb_.AddOffset(BenchmarkError::VT_ERROR_CODE, error_code);
1989 }
1990 explicit BenchmarkErrorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
1991 : fbb_(_fbb) {
1992 start_ = fbb_.StartTable();
1993 }
1994 BenchmarkErrorBuilder &operator=(const BenchmarkErrorBuilder &);
1995 flatbuffers::Offset<BenchmarkError> Finish() {
1996 const auto end = fbb_.EndTable(start_);
1997 auto o = flatbuffers::Offset<BenchmarkError>(end);
1998 return o;
1999 }
2000 };
2001
2002 inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(
2003 flatbuffers::FlatBufferBuilder &_fbb,
2004 tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
2005 int32_t exit_code = 0,
2006 int32_t signal = 0,
2007 flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code = 0) {
2008 BenchmarkErrorBuilder builder_(_fbb);
2009 builder_.add_error_code(error_code);
2010 builder_.add_signal(signal);
2011 builder_.add_exit_code(exit_code);
2012 builder_.add_stage(stage);
2013 return builder_.Finish();
2014 }
2015
2016 inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkErrorDirect(
2017 flatbuffers::FlatBufferBuilder &_fbb,
2018 tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
2019 int32_t exit_code = 0,
2020 int32_t signal = 0,
2021 const std::vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code = nullptr) {
2022 auto error_code__ = error_code ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>>(*error_code) : 0;
2023 return tflite::CreateBenchmarkError(
2024 _fbb,
2025 stage,
2026 exit_code,
2027 signal,
2028 error_code__);
2029 }
2030
2031 flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2032
2033 struct BenchmarkEventT : public flatbuffers::NativeTable {
2034 typedef BenchmarkEvent TableType;
2035 std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings;
2036 tflite::BenchmarkEventType event_type;
2037 std::unique_ptr<tflite::BenchmarkResultT> result;
2038 std::unique_ptr<tflite::BenchmarkErrorT> error;
2039 BenchmarkEventT()
2040 : event_type(tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE) {
2041 }
2042 };
2043
2044 struct BenchmarkEvent FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
2045 typedef BenchmarkEventT NativeTableType;
2046 enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
2047 VT_TFLITE_SETTINGS = 4,
2048 VT_EVENT_TYPE = 6,
2049 VT_RESULT = 8,
2050 VT_ERROR = 10
2051 };
2052 const tflite::TFLiteSettings *tflite_settings() const {
2053 return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
2054 }
2055 tflite::BenchmarkEventType event_type() const {
2056 return static_cast<tflite::BenchmarkEventType>(GetField<int32_t>(VT_EVENT_TYPE, 0));
2057 }
2058 const tflite::BenchmarkResult *result() const {
2059 return GetPointer<const tflite::BenchmarkResult *>(VT_RESULT);
2060 }
2061 const tflite::BenchmarkError *error() const {
2062 return GetPointer<const tflite::BenchmarkError *>(VT_ERROR);
2063 }
2064 bool Verify(flatbuffers::Verifier &verifier) const {
2065 return VerifyTableStart(verifier) &&
2066 VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
2067 verifier.VerifyTable(tflite_settings()) &&
2068 VerifyField<int32_t>(verifier, VT_EVENT_TYPE) &&
2069 VerifyOffset(verifier, VT_RESULT) &&
2070 verifier.VerifyTable(result()) &&
2071 VerifyOffset(verifier, VT_ERROR) &&
2072 verifier.VerifyTable(error()) &&
2073 verifier.EndTable();
2074 }
2075 BenchmarkEventT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
2076 void UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
2077 static flatbuffers::Offset<BenchmarkEvent> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2078 };
2079
2080 struct BenchmarkEventBuilder {
2081 flatbuffers::FlatBufferBuilder &fbb_;
2082 flatbuffers::uoffset_t start_;
2083 void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
2084 fbb_.AddOffset(BenchmarkEvent::VT_TFLITE_SETTINGS, tflite_settings);
2085 }
2086 void add_event_type(tflite::BenchmarkEventType event_type) {
2087 fbb_.AddElement<int32_t>(BenchmarkEvent::VT_EVENT_TYPE, static_cast<int32_t>(event_type), 0);
2088 }
2089 void add_result(flatbuffers::Offset<tflite::BenchmarkResult> result) {
2090 fbb_.AddOffset(BenchmarkEvent::VT_RESULT, result);
2091 }
2092 void add_error(flatbuffers::Offset<tflite::BenchmarkError> error) {
2093 fbb_.AddOffset(BenchmarkEvent::VT_ERROR, error);
2094 }
2095 explicit BenchmarkEventBuilder(flatbuffers::FlatBufferBuilder &_fbb)
2096 : fbb_(_fbb) {
2097 start_ = fbb_.StartTable();
2098 }
2099 BenchmarkEventBuilder &operator=(const BenchmarkEventBuilder &);
2100 flatbuffers::Offset<BenchmarkEvent> Finish() {
2101 const auto end = fbb_.EndTable(start_);
2102 auto o = flatbuffers::Offset<BenchmarkEvent>(end);
2103 return o;
2104 }
2105 };
2106
2107 inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(
2108 flatbuffers::FlatBufferBuilder &_fbb,
2109 flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
2110 tflite::BenchmarkEventType event_type = tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
2111 flatbuffers::Offset<tflite::BenchmarkResult> result = 0,
2112 flatbuffers::Offset<tflite::BenchmarkError> error = 0) {
2113 BenchmarkEventBuilder builder_(_fbb);
2114 builder_.add_error(error);
2115 builder_.add_result(result);
2116 builder_.add_event_type(event_type);
2117 builder_.add_tflite_settings(tflite_settings);
2118 return builder_.Finish();
2119 }
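// Editor's note (not generated code): a sketch of reporting a completed
// benchmark run with the helpers above. BenchmarkEventType_END is assumed to
// be the completion value of the BenchmarkEventType enum declared earlier in
// this header; substitute whichever enum value applies. The timing numbers
// and builder name `fbb` are purely illustrative.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   std::vector<int64_t> init_us{12000};
//   std::vector<int64_t> infer_us{1500, 1480, 1510};
//   auto result = tflite::CreateBenchmarkResultDirect(
//       fbb, &init_us, &infer_us, /*max_memory_kb=*/2048, /*ok=*/true);
//   auto event = tflite::CreateBenchmarkEvent(
//       fbb, /*tflite_settings=*/0, tflite::BenchmarkEventType_END, result);
//   fbb.Finish(event);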
2120
2121 flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2122
2123 inline ComputeSettingsT *ComputeSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2124 auto _o = new ComputeSettingsT();
2125 UnPackTo(_o, _resolver);
2126 return _o;
2127 }
2128
2129 inline void ComputeSettings::UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2130 (void)_o;
2131 (void)_resolver;
2132 { auto _e = preference(); _o->preference = _e; }
2133 { auto _e = tflite_settings(); if (_e) _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); }
2134 { auto _e = model_namespace_for_statistics(); if (_e) _o->model_namespace_for_statistics = _e->str(); }
2135 { auto _e = model_identifier_for_statistics(); if (_e) _o->model_identifier_for_statistics = _e->str(); }
2136 }
2137
2138 inline flatbuffers::Offset<ComputeSettings> ComputeSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2139 return CreateComputeSettings(_fbb, _o, _rehasher);
2140 }
2141
2142 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2143 (void)_rehasher;
2144 (void)_o;
2145 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ComputeSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2146 auto _preference = _o->preference;
2147 auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
2148 auto _model_namespace_for_statistics = _o->model_namespace_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_namespace_for_statistics);
2149 auto _model_identifier_for_statistics = _o->model_identifier_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_identifier_for_statistics);
2150 return tflite::CreateComputeSettings(
2151 _fbb,
2152 _preference,
2153 _tflite_settings,
2154 _model_namespace_for_statistics,
2155 _model_identifier_for_statistics);
2156 }
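// Editor's note (not generated code): the object ("T") API implemented above
// for ComputeSettings (and below for the remaining tables) supports a
// read-modify-write round trip. A hedged sketch, assuming `buf` points at a
// finished ComputeSettings buffer; the field value written here is purely
// illustrative.
//
//   const auto *root = flatbuffers::GetRoot<tflite::ComputeSettings>(buf);
//   std::unique_ptr<tflite::ComputeSettingsT> obj(root->UnPack());
//   obj->model_namespace_for_statistics = "my_app";  // mutate the native object
//   flatbuffers::FlatBufferBuilder fbb;
//   fbb.Finish(tflite::ComputeSettings::Pack(fbb, obj.get()));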
2157
2158 inline NNAPISettingsT *NNAPISettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2159 auto _o = new NNAPISettingsT();
2160 UnPackTo(_o, _resolver);
2161 return _o;
2162 }
2163
2164 inline void NNAPISettings::UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2165 (void)_o;
2166 (void)_resolver;
2167 { auto _e = accelerator_name(); if (_e) _o->accelerator_name = _e->str(); }
2168 { auto _e = cache_directory(); if (_e) _o->cache_directory = _e->str(); }
2169 { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
2170 { auto _e = execution_preference(); _o->execution_preference = _e; }
2171 { auto _e = no_of_nnapi_instances_to_cache(); _o->no_of_nnapi_instances_to_cache = _e; }
2172 { auto _e = fallback_settings(); if (_e) _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); }
2173 { auto _e = allow_nnapi_cpu_on_android_10_plus(); _o->allow_nnapi_cpu_on_android_10_plus = _e; }
2174 { auto _e = execution_priority(); _o->execution_priority = _e; }
2175 { auto _e = allow_dynamic_dimensions(); _o->allow_dynamic_dimensions = _e; }
2176 { auto _e = allow_fp16_precision_for_fp32(); _o->allow_fp16_precision_for_fp32 = _e; }
2177 }
2178
2179 inline flatbuffers::Offset<NNAPISettings> NNAPISettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2180 return CreateNNAPISettings(_fbb, _o, _rehasher);
2181 }
2182
2183 inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2184 (void)_rehasher;
2185 (void)_o;
2186 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NNAPISettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2187 auto _accelerator_name = _o->accelerator_name.empty() ? 0 : _fbb.CreateString(_o->accelerator_name);
2188 auto _cache_directory = _o->cache_directory.empty() ? 0 : _fbb.CreateString(_o->cache_directory);
2189 auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
2190 auto _execution_preference = _o->execution_preference;
2191 auto _no_of_nnapi_instances_to_cache = _o->no_of_nnapi_instances_to_cache;
2192 auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
2193 auto _allow_nnapi_cpu_on_android_10_plus = _o->allow_nnapi_cpu_on_android_10_plus;
2194 auto _execution_priority = _o->execution_priority;
2195 auto _allow_dynamic_dimensions = _o->allow_dynamic_dimensions;
2196 auto _allow_fp16_precision_for_fp32 = _o->allow_fp16_precision_for_fp32;
2197 return tflite::CreateNNAPISettings(
2198 _fbb,
2199 _accelerator_name,
2200 _cache_directory,
2201 _model_token,
2202 _execution_preference,
2203 _no_of_nnapi_instances_to_cache,
2204 _fallback_settings,
2205 _allow_nnapi_cpu_on_android_10_plus,
2206 _execution_priority,
2207 _allow_dynamic_dimensions,
2208 _allow_fp16_precision_for_fp32);
2209 }
2210
2211 inline GPUSettingsT *GPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2212 auto _o = new GPUSettingsT();
2213 UnPackTo(_o, _resolver);
2214 return _o;
2215 }
2216
2217 inline void GPUSettings::UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2218 (void)_o;
2219 (void)_resolver;
2220 { auto _e = is_precision_loss_allowed(); _o->is_precision_loss_allowed = _e; }
2221 { auto _e = enable_quantized_inference(); _o->enable_quantized_inference = _e; }
2222 { auto _e = force_backend(); _o->force_backend = _e; }
2223 }
2224
2225 inline flatbuffers::Offset<GPUSettings> GPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2226 return CreateGPUSettings(_fbb, _o, _rehasher);
2227 }
2228
2229 inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2230 (void)_rehasher;
2231 (void)_o;
2232 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2233 auto _is_precision_loss_allowed = _o->is_precision_loss_allowed;
2234 auto _enable_quantized_inference = _o->enable_quantized_inference;
2235 auto _force_backend = _o->force_backend;
2236 return tflite::CreateGPUSettings(
2237 _fbb,
2238 _is_precision_loss_allowed,
2239 _enable_quantized_inference,
2240 _force_backend);
2241 }
2242
2243 inline HexagonSettingsT *HexagonSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2244 auto _o = new HexagonSettingsT();
2245 UnPackTo(_o, _resolver);
2246 return _o;
2247 }
2248
2249 inline void HexagonSettings::UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2250 (void)_o;
2251 (void)_resolver;
2252 { auto _e = debug_level(); _o->debug_level = _e; }
2253 { auto _e = powersave_level(); _o->powersave_level = _e; }
2254 { auto _e = print_graph_profile(); _o->print_graph_profile = _e; }
2255 { auto _e = print_graph_debug(); _o->print_graph_debug = _e; }
2256 }
2257
2258 inline flatbuffers::Offset<HexagonSettings> HexagonSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2259 return CreateHexagonSettings(_fbb, _o, _rehasher);
2260 }
2261
2262 inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2263 (void)_rehasher;
2264 (void)_o;
2265 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HexagonSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2266 auto _debug_level = _o->debug_level;
2267 auto _powersave_level = _o->powersave_level;
2268 auto _print_graph_profile = _o->print_graph_profile;
2269 auto _print_graph_debug = _o->print_graph_debug;
2270 return tflite::CreateHexagonSettings(
2271 _fbb,
2272 _debug_level,
2273 _powersave_level,
2274 _print_graph_profile,
2275 _print_graph_debug);
2276 }
2277
2278 inline XNNPackSettingsT *XNNPackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2279 auto _o = new XNNPackSettingsT();
2280 UnPackTo(_o, _resolver);
2281 return _o;
2282 }
2283
2284 inline void XNNPackSettings::UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2285 (void)_o;
2286 (void)_resolver;
2287 { auto _e = num_threads(); _o->num_threads = _e; }
2288 }
2289
2290 inline flatbuffers::Offset<XNNPackSettings> XNNPackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2291 return CreateXNNPackSettings(_fbb, _o, _rehasher);
2292 }
2293
2294 inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2295 (void)_rehasher;
2296 (void)_o;
2297 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const XNNPackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2298 auto _num_threads = _o->num_threads;
2299 return tflite::CreateXNNPackSettings(
2300 _fbb,
2301 _num_threads);
2302 }
2303
2304 inline EdgeTpuDeviceSpecT *EdgeTpuDeviceSpec::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2305 auto _o = new EdgeTpuDeviceSpecT();
2306 UnPackTo(_o, _resolver);
2307 return _o;
2308 }
2309
2310 inline void EdgeTpuDeviceSpec::UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2311 (void)_o;
2312 (void)_resolver;
2313 { auto _e = platform_type(); _o->platform_type = _e; }
2314 { auto _e = num_chips(); _o->num_chips = _e; }
2315 { auto _e = device_paths(); if (_e) { _o->device_paths.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->device_paths[_i] = _e->Get(_i)->str(); } } }
2316 { auto _e = chip_family(); _o->chip_family = _e; }
2317 }
2318
2319 inline flatbuffers::Offset<EdgeTpuDeviceSpec> EdgeTpuDeviceSpec::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2320 return CreateEdgeTpuDeviceSpec(_fbb, _o, _rehasher);
2321 }
2322
2323 inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2324 (void)_rehasher;
2325 (void)_o;
2326 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuDeviceSpecT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2327 auto _platform_type = _o->platform_type;
2328 auto _num_chips = _o->num_chips;
2329 auto _device_paths = _o->device_paths.size() ? _fbb.CreateVectorOfStrings(_o->device_paths) : 0;
2330 auto _chip_family = _o->chip_family;
2331 return tflite::CreateEdgeTpuDeviceSpec(
2332 _fbb,
2333 _platform_type,
2334 _num_chips,
2335 _device_paths,
2336 _chip_family);
2337 }
2338
2339 inline EdgeTpuInactivePowerConfigT *EdgeTpuInactivePowerConfig::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2340 auto _o = new EdgeTpuInactivePowerConfigT();
2341 UnPackTo(_o, _resolver);
2342 return _o;
2343 }
2344
2345 inline void EdgeTpuInactivePowerConfig::UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2346 (void)_o;
2347 (void)_resolver;
2348 { auto _e = inactive_power_state(); _o->inactive_power_state = _e; }
2349 { auto _e = inactive_timeout_us(); _o->inactive_timeout_us = _e; }
2350 }
2351
Pack(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuInactivePowerConfigT * _o,const flatbuffers::rehasher_function_t * _rehasher)2352 inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> EdgeTpuInactivePowerConfig::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2353 return CreateEdgeTpuInactivePowerConfig(_fbb, _o, _rehasher);
2354 }
2355
CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuInactivePowerConfigT * _o,const flatbuffers::rehasher_function_t * _rehasher)2356 inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2357 (void)_rehasher;
2358 (void)_o;
2359 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuInactivePowerConfigT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2360 auto _inactive_power_state = _o->inactive_power_state;
2361 auto _inactive_timeout_us = _o->inactive_timeout_us;
2362 return tflite::CreateEdgeTpuInactivePowerConfig(
2363 _fbb,
2364 _inactive_power_state,
2365 _inactive_timeout_us);
2366 }
2367
UnPack(const flatbuffers::resolver_function_t * _resolver)2368 inline EdgeTpuSettingsT *EdgeTpuSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
2369 auto _o = new EdgeTpuSettingsT();
2370 UnPackTo(_o, _resolver);
2371 return _o;
2372 }
2373
UnPackTo(EdgeTpuSettingsT * _o,const flatbuffers::resolver_function_t * _resolver)2374 inline void EdgeTpuSettings::UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
2375 (void)_o;
2376 (void)_resolver;
2377 { auto _e = inference_power_state(); _o->inference_power_state = _e; }
2378 { auto _e = inactive_power_configs(); if (_e) { _o->inactive_power_configs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inactive_power_configs[_i] = std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>(_e->Get(_i)->UnPack(_resolver)); } } }
2379 { auto _e = inference_priority(); _o->inference_priority = _e; }
2380 { auto _e = edgetpu_device_spec(); if (_e) _o->edgetpu_device_spec = std::unique_ptr<tflite::EdgeTpuDeviceSpecT>(_e->UnPack(_resolver)); }
2381 }
2382
Pack(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuSettingsT * _o,const flatbuffers::rehasher_function_t * _rehasher)2383 inline flatbuffers::Offset<EdgeTpuSettings> EdgeTpuSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
2384 return CreateEdgeTpuSettings(_fbb, _o, _rehasher);
2385 }
2386
CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuSettingsT * _o,const flatbuffers::rehasher_function_t * _rehasher)2387 inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
2388 (void)_rehasher;
2389 (void)_o;
2390 struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
2391 auto _inference_power_state = _o->inference_power_state;
2392 auto _inactive_power_configs = _o->inactive_power_configs.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> (_o->inactive_power_configs.size(), [](size_t i, _VectorArgs *__va) { return CreateEdgeTpuInactivePowerConfig(*__va->__fbb, __va->__o->inactive_power_configs[i].get(), __va->__rehasher); }, &_va ) : 0;
2393 auto _inference_priority = _o->inference_priority;
2394 auto _edgetpu_device_spec = _o->edgetpu_device_spec ? CreateEdgeTpuDeviceSpec(_fbb, _o->edgetpu_device_spec.get(), _rehasher) : 0;
2395 return tflite::CreateEdgeTpuSettings(
2396 _fbb,
2397 _inference_power_state,
2398 _inactive_power_configs,
2399 _inference_priority,
2400 _edgetpu_device_spec);
2401 }
2402
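// Usage sketch (illustrative only): populating an EdgeTpuSettingsT whose
// nested tables are held by std::unique_ptr (single sub-table) and by a
// vector of std::unique_ptr (repeated sub-table), then packing it with the
// functions above. The helper name, device path, and numeric values are
// hypothetical examples, not recommended defaults.
inline flatbuffers::Offset<EdgeTpuSettings> ExamplePackEdgeTpuSettings(
    flatbuffers::FlatBufferBuilder &fbb) {
  EdgeTpuSettingsT settings;
  settings.inference_priority = 1;  // example value only
  // Single nested table: owned via std::unique_ptr in the object API.
  settings.edgetpu_device_spec.reset(new EdgeTpuDeviceSpecT());
  settings.edgetpu_device_spec->num_chips = 1;                           // example
  settings.edgetpu_device_spec->device_paths.push_back("/dev/example");  // example path
  // Repeated nested table: a vector of std::unique_ptr.
  settings.inactive_power_configs.emplace_back(new EdgeTpuInactivePowerConfigT());
  settings.inactive_power_configs.back()->inactive_timeout_us = 1000;    // example, microseconds
  return EdgeTpuSettings::Pack(fbb, &settings, /*_rehasher=*/nullptr);
}
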
inline CoralSettingsT *CoralSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new CoralSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void CoralSettings::UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = device(); if (_e) _o->device = _e->str(); }
  { auto _e = performance(); _o->performance = _e; }
  { auto _e = usb_always_dfu(); _o->usb_always_dfu = _e; }
  { auto _e = usb_max_bulk_in_queue_length(); _o->usb_max_bulk_in_queue_length = _e; }
}

inline flatbuffers::Offset<CoralSettings> CoralSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCoralSettings(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CoralSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _device = _o->device.empty() ? 0 : _fbb.CreateString(_o->device);
  auto _performance = _o->performance;
  auto _usb_always_dfu = _o->usb_always_dfu;
  auto _usb_max_bulk_in_queue_length = _o->usb_max_bulk_in_queue_length;
  return tflite::CreateCoralSettings(
      _fbb,
      _device,
      _performance,
      _usb_always_dfu,
      _usb_max_bulk_in_queue_length);
}

inline CPUSettingsT *CPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new CPUSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void CPUSettings::UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = num_threads(); _o->num_threads = _e; }
}

inline flatbuffers::Offset<CPUSettings> CPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCPUSettings(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _num_threads = _o->num_threads;
  return tflite::CreateCPUSettings(
      _fbb,
      _num_threads);
}

inline TFLiteSettingsT *TFLiteSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new TFLiteSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void TFLiteSettings::UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = delegate(); _o->delegate = _e; }
  { auto _e = nnapi_settings(); if (_e) _o->nnapi_settings = std::unique_ptr<tflite::NNAPISettingsT>(_e->UnPack(_resolver)); }
  { auto _e = gpu_settings(); if (_e) _o->gpu_settings = std::unique_ptr<tflite::GPUSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = hexagon_settings(); if (_e) _o->hexagon_settings = std::unique_ptr<tflite::HexagonSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = xnnpack_settings(); if (_e) _o->xnnpack_settings = std::unique_ptr<tflite::XNNPackSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = cpu_settings(); if (_e) _o->cpu_settings = std::unique_ptr<tflite::CPUSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = max_delegated_partitions(); _o->max_delegated_partitions = _e; }
  { auto _e = edgetpu_settings(); if (_e) _o->edgetpu_settings = std::unique_ptr<tflite::EdgeTpuSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = coral_settings(); if (_e) _o->coral_settings = std::unique_ptr<tflite::CoralSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = fallback_settings(); if (_e) _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); }
}

inline flatbuffers::Offset<TFLiteSettings> TFLiteSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateTFLiteSettings(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TFLiteSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _delegate = _o->delegate;
  auto _nnapi_settings = _o->nnapi_settings ? CreateNNAPISettings(_fbb, _o->nnapi_settings.get(), _rehasher) : 0;
  auto _gpu_settings = _o->gpu_settings ? CreateGPUSettings(_fbb, _o->gpu_settings.get(), _rehasher) : 0;
  auto _hexagon_settings = _o->hexagon_settings ? CreateHexagonSettings(_fbb, _o->hexagon_settings.get(), _rehasher) : 0;
  auto _xnnpack_settings = _o->xnnpack_settings ? CreateXNNPackSettings(_fbb, _o->xnnpack_settings.get(), _rehasher) : 0;
  auto _cpu_settings = _o->cpu_settings ? CreateCPUSettings(_fbb, _o->cpu_settings.get(), _rehasher) : 0;
  auto _max_delegated_partitions = _o->max_delegated_partitions;
  auto _edgetpu_settings = _o->edgetpu_settings ? CreateEdgeTpuSettings(_fbb, _o->edgetpu_settings.get(), _rehasher) : 0;
  auto _coral_settings = _o->coral_settings ? CreateCoralSettings(_fbb, _o->coral_settings.get(), _rehasher) : 0;
  auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
  return tflite::CreateTFLiteSettings(
      _fbb,
      _delegate,
      _nnapi_settings,
      _gpu_settings,
      _hexagon_settings,
      _xnnpack_settings,
      _cpu_settings,
      _max_delegated_partitions,
      _edgetpu_settings,
      _coral_settings,
      _fallback_settings);
}

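// Usage sketch (illustrative only): a full object-API round trip for
// TFLiteSettings -- build the "T" object, pack it into a FlatBufferBuilder,
// then read it back through GetRoot and UnPack. The helper name, delegate
// choice, and thread count are hypothetical examples; real callers would
// normally verify a buffer before reading it.
inline std::unique_ptr<TFLiteSettingsT> ExampleRoundTripTFLiteSettings() {
  TFLiteSettingsT settings;
  settings.delegate = Delegate_XNNPACK;        // example delegate choice
  settings.xnnpack_settings.reset(new XNNPackSettingsT());
  settings.xnnpack_settings->num_threads = 2;  // example value only
  flatbuffers::FlatBufferBuilder fbb;
  fbb.Finish(TFLiteSettings::Pack(fbb, &settings, /*_rehasher=*/nullptr));
  // Re-read the serialized bytes with the accessor API and unpack them into a
  // freshly allocated object-API instance.
  auto *root = flatbuffers::GetRoot<TFLiteSettings>(fbb.GetBufferPointer());
  return std::unique_ptr<TFLiteSettingsT>(root->UnPack(/*_resolver=*/nullptr));
}
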
inline FallbackSettingsT *FallbackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new FallbackSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void FallbackSettings::UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = allow_automatic_fallback_on_compilation_error(); _o->allow_automatic_fallback_on_compilation_error = _e; }
  { auto _e = allow_automatic_fallback_on_execution_error(); _o->allow_automatic_fallback_on_execution_error = _e; }
}

inline flatbuffers::Offset<FallbackSettings> FallbackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateFallbackSettings(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FallbackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _allow_automatic_fallback_on_compilation_error = _o->allow_automatic_fallback_on_compilation_error;
  auto _allow_automatic_fallback_on_execution_error = _o->allow_automatic_fallback_on_execution_error;
  return tflite::CreateFallbackSettings(
      _fbb,
      _allow_automatic_fallback_on_compilation_error,
      _allow_automatic_fallback_on_execution_error);
}

inline BenchmarkMetricT *BenchmarkMetric::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new BenchmarkMetricT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void BenchmarkMetric::UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = name(); if (_e) _o->name = _e->str(); }
  { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } }
}

inline flatbuffers::Offset<BenchmarkMetric> BenchmarkMetric::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkMetric(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkMetricT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
  auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
  return tflite::CreateBenchmarkMetric(
      _fbb,
      _name,
      _values);
}

inline BenchmarkResultT *BenchmarkResult::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new BenchmarkResultT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void BenchmarkResult::UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = initialization_time_us(); if (_e) { _o->initialization_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->initialization_time_us[_i] = _e->Get(_i); } } }
  { auto _e = inference_time_us(); if (_e) { _o->inference_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inference_time_us[_i] = _e->Get(_i); } } }
  { auto _e = max_memory_kb(); _o->max_memory_kb = _e; }
  { auto _e = ok(); _o->ok = _e; }
  { auto _e = metrics(); if (_e) { _o->metrics.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->metrics[_i] = std::unique_ptr<tflite::BenchmarkMetricT>(_e->Get(_i)->UnPack(_resolver)); } } }
}

inline flatbuffers::Offset<BenchmarkResult> BenchmarkResult::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkResult(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkResultT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _initialization_time_us = _o->initialization_time_us.size() ? _fbb.CreateVector(_o->initialization_time_us) : 0;
  auto _inference_time_us = _o->inference_time_us.size() ? _fbb.CreateVector(_o->inference_time_us) : 0;
  auto _max_memory_kb = _o->max_memory_kb;
  auto _ok = _o->ok;
  auto _metrics = _o->metrics.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>> (_o->metrics.size(), [](size_t i, _VectorArgs *__va) { return CreateBenchmarkMetric(*__va->__fbb, __va->__o->metrics[i].get(), __va->__rehasher); }, &_va ) : 0;
  return tflite::CreateBenchmarkResult(
      _fbb,
      _initialization_time_us,
      _inference_time_us,
      _max_memory_kb,
      _ok,
      _metrics);
}

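// Usage sketch (illustrative only): filling a BenchmarkResultT with timing
// vectors and one named metric before packing. The helper name, metric name,
// and numbers are hypothetical examples, not values produced by any real run.
inline flatbuffers::Offset<BenchmarkResult> ExamplePackBenchmarkResult(
    flatbuffers::FlatBufferBuilder &fbb) {
  BenchmarkResultT result;
  result.initialization_time_us.push_back(12000);  // example, microseconds
  result.inference_time_us.push_back(3400);        // example, microseconds
  result.ok = true;
  result.metrics.emplace_back(new BenchmarkMetricT());
  result.metrics.back()->name = "example_metric";  // example metric name
  result.metrics.back()->values.push_back(0.75f);  // example value
  return BenchmarkResult::Pack(fbb, &result, /*_rehasher=*/nullptr);
}
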
inline ErrorCodeT *ErrorCode::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new ErrorCodeT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void ErrorCode::UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = source(); _o->source = _e; }
  { auto _e = tflite_error(); _o->tflite_error = _e; }
  { auto _e = underlying_api_error(); _o->underlying_api_error = _e; }
}

inline flatbuffers::Offset<ErrorCode> ErrorCode::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateErrorCode(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ErrorCodeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _source = _o->source;
  auto _tflite_error = _o->tflite_error;
  auto _underlying_api_error = _o->underlying_api_error;
  return tflite::CreateErrorCode(
      _fbb,
      _source,
      _tflite_error,
      _underlying_api_error);
}

inline BenchmarkErrorT *BenchmarkError::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new BenchmarkErrorT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void BenchmarkError::UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = stage(); _o->stage = _e; }
  { auto _e = exit_code(); _o->exit_code = _e; }
  { auto _e = signal(); _o->signal = _e; }
  { auto _e = error_code(); if (_e) { _o->error_code.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->error_code[_i] = std::unique_ptr<tflite::ErrorCodeT>(_e->Get(_i)->UnPack(_resolver)); } } }
}

inline flatbuffers::Offset<BenchmarkError> BenchmarkError::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkError(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkErrorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _stage = _o->stage;
  auto _exit_code = _o->exit_code;
  auto _signal = _o->signal;
  auto _error_code = _o->error_code.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>> (_o->error_code.size(), [](size_t i, _VectorArgs *__va) { return CreateErrorCode(*__va->__fbb, __va->__o->error_code[i].get(), __va->__rehasher); }, &_va ) : 0;
  return tflite::CreateBenchmarkError(
      _fbb,
      _stage,
      _exit_code,
      _signal,
      _error_code);
}

inline BenchmarkEventT *BenchmarkEvent::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new BenchmarkEventT();
  UnPackTo(_o, _resolver);
  return _o;
}

inline void BenchmarkEvent::UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = tflite_settings(); if (_e) _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = event_type(); _o->event_type = _e; }
  { auto _e = result(); if (_e) _o->result = std::unique_ptr<tflite::BenchmarkResultT>(_e->UnPack(_resolver)); }
  { auto _e = error(); if (_e) _o->error = std::unique_ptr<tflite::BenchmarkErrorT>(_e->UnPack(_resolver)); }
}

inline flatbuffers::Offset<BenchmarkEvent> BenchmarkEvent::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkEvent(_fbb, _o, _rehasher);
}

inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkEventT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
  auto _event_type = _o->event_type;
  auto _result = _o->result ? CreateBenchmarkResult(_fbb, _o->result.get(), _rehasher) : 0;
  auto _error = _o->error ? CreateBenchmarkError(_fbb, _o->error.get(), _rehasher) : 0;
  return tflite::CreateBenchmarkEvent(
      _fbb,
      _tflite_settings,
      _event_type,
      _result,
      _error);
}

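// Usage sketch (illustrative only): turning a serialized BenchmarkEvent back
// into its object-API form. The helper name and the raw-pointer parameter are
// hypothetical; callers are assumed to have obtained and verified the buffer
// elsewhere.
inline std::unique_ptr<BenchmarkEventT> ExampleUnpackBenchmarkEvent(
    const uint8_t *serialized_event) {
  auto *event = flatbuffers::GetRoot<BenchmarkEvent>(serialized_event);
  // UnPack recursively unpacks nested tables such as tflite_settings, result,
  // and error into heap-allocated "T" objects.
  return std::unique_ptr<BenchmarkEventT>(event->UnPack(/*_resolver=*/nullptr));
}
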
}  // namespace tflite

#endif  // FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_