• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// automatically generated by the FlatBuffers compiler, do not modify


18 #ifndef FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_
19 #define FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_
20 
21 #include "flatbuffers/flatbuffers.h"
22 
23 namespace tflite {
24 
25 struct ComputeSettings;
26 struct ComputeSettingsBuilder;
27 struct ComputeSettingsT;
28 
29 struct NNAPISettings;
30 struct NNAPISettingsBuilder;
31 struct NNAPISettingsT;
32 
33 struct GPUSettings;
34 struct GPUSettingsBuilder;
35 struct GPUSettingsT;
36 
37 struct HexagonSettings;
38 struct HexagonSettingsBuilder;
39 struct HexagonSettingsT;
40 
41 struct XNNPackSettings;
42 struct XNNPackSettingsBuilder;
43 struct XNNPackSettingsT;
44 
45 struct CoreMLSettings;
46 struct CoreMLSettingsBuilder;
47 struct CoreMLSettingsT;
48 
49 struct EdgeTpuDeviceSpec;
50 struct EdgeTpuDeviceSpecBuilder;
51 struct EdgeTpuDeviceSpecT;
52 
53 struct EdgeTpuInactivePowerConfig;
54 struct EdgeTpuInactivePowerConfigBuilder;
55 struct EdgeTpuInactivePowerConfigT;
56 
57 struct EdgeTpuSettings;
58 struct EdgeTpuSettingsBuilder;
59 struct EdgeTpuSettingsT;
60 
61 struct CoralSettings;
62 struct CoralSettingsBuilder;
63 struct CoralSettingsT;
64 
65 struct CPUSettings;
66 struct CPUSettingsBuilder;
67 struct CPUSettingsT;
68 
69 struct TFLiteSettings;
70 struct TFLiteSettingsBuilder;
71 struct TFLiteSettingsT;
72 
73 struct FallbackSettings;
74 struct FallbackSettingsBuilder;
75 struct FallbackSettingsT;
76 
77 struct BenchmarkMetric;
78 struct BenchmarkMetricBuilder;
79 struct BenchmarkMetricT;
80 
81 struct BenchmarkResult;
82 struct BenchmarkResultBuilder;
83 struct BenchmarkResultT;
84 
85 struct ErrorCode;
86 struct ErrorCodeBuilder;
87 struct ErrorCodeT;
88 
89 struct BenchmarkError;
90 struct BenchmarkErrorBuilder;
91 struct BenchmarkErrorT;
92 
93 struct BenchmarkEvent;
94 struct BenchmarkEventBuilder;
95 struct BenchmarkEventT;
96 
97 struct BestAccelerationDecision;
98 struct BestAccelerationDecisionBuilder;
99 struct BestAccelerationDecisionT;
100 
101 struct BenchmarkInitializationFailure;
102 struct BenchmarkInitializationFailureBuilder;
103 struct BenchmarkInitializationFailureT;
104 
105 struct MiniBenchmarkEvent;
106 struct MiniBenchmarkEventBuilder;
107 struct MiniBenchmarkEventT;
108 
109 struct ModelFile;
110 struct ModelFileBuilder;
111 struct ModelFileT;
112 
113 struct BenchmarkStoragePaths;
114 struct BenchmarkStoragePathsBuilder;
115 struct BenchmarkStoragePathsT;
116 
117 struct MinibenchmarkSettings;
118 struct MinibenchmarkSettingsBuilder;
119 struct MinibenchmarkSettingsT;
120 
121 bool operator==(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs);
122 bool operator!=(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs);
123 bool operator==(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs);
124 bool operator!=(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs);
125 bool operator==(const GPUSettingsT &lhs, const GPUSettingsT &rhs);
126 bool operator!=(const GPUSettingsT &lhs, const GPUSettingsT &rhs);
127 bool operator==(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs);
128 bool operator!=(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs);
129 bool operator==(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs);
130 bool operator!=(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs);
131 bool operator==(const CoreMLSettingsT &lhs, const CoreMLSettingsT &rhs);
132 bool operator!=(const CoreMLSettingsT &lhs, const CoreMLSettingsT &rhs);
133 bool operator==(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs);
134 bool operator!=(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs);
135 bool operator==(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs);
136 bool operator!=(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs);
137 bool operator==(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs);
138 bool operator!=(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs);
139 bool operator==(const CoralSettingsT &lhs, const CoralSettingsT &rhs);
140 bool operator!=(const CoralSettingsT &lhs, const CoralSettingsT &rhs);
141 bool operator==(const CPUSettingsT &lhs, const CPUSettingsT &rhs);
142 bool operator!=(const CPUSettingsT &lhs, const CPUSettingsT &rhs);
143 bool operator==(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs);
144 bool operator!=(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs);
145 bool operator==(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs);
146 bool operator!=(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs);
147 bool operator==(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs);
148 bool operator!=(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs);
149 bool operator==(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs);
150 bool operator!=(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs);
151 bool operator==(const ErrorCodeT &lhs, const ErrorCodeT &rhs);
152 bool operator!=(const ErrorCodeT &lhs, const ErrorCodeT &rhs);
153 bool operator==(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs);
154 bool operator!=(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs);
155 bool operator==(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs);
156 bool operator!=(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs);
157 bool operator==(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs);
158 bool operator!=(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs);
159 bool operator==(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs);
160 bool operator!=(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs);
161 bool operator==(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs);
162 bool operator!=(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs);
163 bool operator==(const ModelFileT &lhs, const ModelFileT &rhs);
164 bool operator!=(const ModelFileT &lhs, const ModelFileT &rhs);
165 bool operator==(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs);
166 bool operator!=(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs);
167 bool operator==(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs);
168 bool operator!=(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs);
169 
170 enum ExecutionPreference : int32_t {
171   ExecutionPreference_ANY = 0,
172   ExecutionPreference_LOW_LATENCY = 1,
173   ExecutionPreference_LOW_POWER = 2,
174   ExecutionPreference_FORCE_CPU = 3,
175   ExecutionPreference_MIN = ExecutionPreference_ANY,
176   ExecutionPreference_MAX = ExecutionPreference_FORCE_CPU
177 };
178 
EnumValuesExecutionPreference()179 inline const ExecutionPreference (&EnumValuesExecutionPreference())[4] {
180   static const ExecutionPreference values[] = {
181     ExecutionPreference_ANY,
182     ExecutionPreference_LOW_LATENCY,
183     ExecutionPreference_LOW_POWER,
184     ExecutionPreference_FORCE_CPU
185   };
186   return values;
187 }
188 
EnumNamesExecutionPreference()189 inline const char * const *EnumNamesExecutionPreference() {
190   static const char * const names[5] = {
191     "ANY",
192     "LOW_LATENCY",
193     "LOW_POWER",
194     "FORCE_CPU",
195     nullptr
196   };
197   return names;
198 }
199 
EnumNameExecutionPreference(ExecutionPreference e)200 inline const char *EnumNameExecutionPreference(ExecutionPreference e) {
201   if (flatbuffers::IsOutRange(e, ExecutionPreference_ANY, ExecutionPreference_FORCE_CPU)) return "";
202   const size_t index = static_cast<size_t>(e);
203   return EnumNamesExecutionPreference()[index];
204 }
205 
206 enum Delegate : int32_t {
207   Delegate_NONE = 0,
208   Delegate_NNAPI = 1,
209   Delegate_GPU = 2,
210   Delegate_HEXAGON = 3,
211   Delegate_XNNPACK = 4,
212   Delegate_EDGETPU = 5,
213   Delegate_EDGETPU_CORAL = 6,
214   Delegate_CORE_ML = 7,
215   Delegate_MIN = Delegate_NONE,
216   Delegate_MAX = Delegate_CORE_ML
217 };
218 
EnumValuesDelegate()219 inline const Delegate (&EnumValuesDelegate())[8] {
220   static const Delegate values[] = {
221     Delegate_NONE,
222     Delegate_NNAPI,
223     Delegate_GPU,
224     Delegate_HEXAGON,
225     Delegate_XNNPACK,
226     Delegate_EDGETPU,
227     Delegate_EDGETPU_CORAL,
228     Delegate_CORE_ML
229   };
230   return values;
231 }
232 
EnumNamesDelegate()233 inline const char * const *EnumNamesDelegate() {
234   static const char * const names[9] = {
235     "NONE",
236     "NNAPI",
237     "GPU",
238     "HEXAGON",
239     "XNNPACK",
240     "EDGETPU",
241     "EDGETPU_CORAL",
242     "CORE_ML",
243     nullptr
244   };
245   return names;
246 }
247 
EnumNameDelegate(Delegate e)248 inline const char *EnumNameDelegate(Delegate e) {
249   if (flatbuffers::IsOutRange(e, Delegate_NONE, Delegate_CORE_ML)) return "";
250   const size_t index = static_cast<size_t>(e);
251   return EnumNamesDelegate()[index];
252 }
253 
254 enum NNAPIExecutionPreference : int32_t {
255   NNAPIExecutionPreference_UNDEFINED = 0,
256   NNAPIExecutionPreference_NNAPI_LOW_POWER = 1,
257   NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER = 2,
258   NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED = 3,
259   NNAPIExecutionPreference_MIN = NNAPIExecutionPreference_UNDEFINED,
260   NNAPIExecutionPreference_MAX = NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
261 };
262 
EnumValuesNNAPIExecutionPreference()263 inline const NNAPIExecutionPreference (&EnumValuesNNAPIExecutionPreference())[4] {
264   static const NNAPIExecutionPreference values[] = {
265     NNAPIExecutionPreference_UNDEFINED,
266     NNAPIExecutionPreference_NNAPI_LOW_POWER,
267     NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER,
268     NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
269   };
270   return values;
271 }
272 
EnumNamesNNAPIExecutionPreference()273 inline const char * const *EnumNamesNNAPIExecutionPreference() {
274   static const char * const names[5] = {
275     "UNDEFINED",
276     "NNAPI_LOW_POWER",
277     "NNAPI_FAST_SINGLE_ANSWER",
278     "NNAPI_SUSTAINED_SPEED",
279     nullptr
280   };
281   return names;
282 }
283 
EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e)284 inline const char *EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e) {
285   if (flatbuffers::IsOutRange(e, NNAPIExecutionPreference_UNDEFINED, NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED)) return "";
286   const size_t index = static_cast<size_t>(e);
287   return EnumNamesNNAPIExecutionPreference()[index];
288 }
289 
290 enum NNAPIExecutionPriority : int32_t {
291   NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED = 0,
292   NNAPIExecutionPriority_NNAPI_PRIORITY_LOW = 1,
293   NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM = 2,
294   NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH = 3,
295   NNAPIExecutionPriority_MIN = NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
296   NNAPIExecutionPriority_MAX = NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
297 };
298 
EnumValuesNNAPIExecutionPriority()299 inline const NNAPIExecutionPriority (&EnumValuesNNAPIExecutionPriority())[4] {
300   static const NNAPIExecutionPriority values[] = {
301     NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
302     NNAPIExecutionPriority_NNAPI_PRIORITY_LOW,
303     NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM,
304     NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
305   };
306   return values;
307 }
308 
EnumNamesNNAPIExecutionPriority()309 inline const char * const *EnumNamesNNAPIExecutionPriority() {
310   static const char * const names[5] = {
311     "NNAPI_PRIORITY_UNDEFINED",
312     "NNAPI_PRIORITY_LOW",
313     "NNAPI_PRIORITY_MEDIUM",
314     "NNAPI_PRIORITY_HIGH",
315     nullptr
316   };
317   return names;
318 }
319 
EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e)320 inline const char *EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e) {
321   if (flatbuffers::IsOutRange(e, NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED, NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH)) return "";
322   const size_t index = static_cast<size_t>(e);
323   return EnumNamesNNAPIExecutionPriority()[index];
324 }
325 
326 enum GPUBackend : int32_t {
327   GPUBackend_UNSET = 0,
328   GPUBackend_OPENCL = 1,
329   GPUBackend_OPENGL = 2,
330   GPUBackend_MIN = GPUBackend_UNSET,
331   GPUBackend_MAX = GPUBackend_OPENGL
332 };
333 
EnumValuesGPUBackend()334 inline const GPUBackend (&EnumValuesGPUBackend())[3] {
335   static const GPUBackend values[] = {
336     GPUBackend_UNSET,
337     GPUBackend_OPENCL,
338     GPUBackend_OPENGL
339   };
340   return values;
341 }
342 
EnumNamesGPUBackend()343 inline const char * const *EnumNamesGPUBackend() {
344   static const char * const names[4] = {
345     "UNSET",
346     "OPENCL",
347     "OPENGL",
348     nullptr
349   };
350   return names;
351 }
352 
EnumNameGPUBackend(GPUBackend e)353 inline const char *EnumNameGPUBackend(GPUBackend e) {
354   if (flatbuffers::IsOutRange(e, GPUBackend_UNSET, GPUBackend_OPENGL)) return "";
355   const size_t index = static_cast<size_t>(e);
356   return EnumNamesGPUBackend()[index];
357 }
358 
359 enum GPUInferencePriority : int32_t {
360   GPUInferencePriority_GPU_PRIORITY_AUTO = 0,
361   GPUInferencePriority_GPU_PRIORITY_MAX_PRECISION = 1,
362   GPUInferencePriority_GPU_PRIORITY_MIN_LATENCY = 2,
363   GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE = 3,
364   GPUInferencePriority_MIN = GPUInferencePriority_GPU_PRIORITY_AUTO,
365   GPUInferencePriority_MAX = GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE
366 };
367 
EnumValuesGPUInferencePriority()368 inline const GPUInferencePriority (&EnumValuesGPUInferencePriority())[4] {
369   static const GPUInferencePriority values[] = {
370     GPUInferencePriority_GPU_PRIORITY_AUTO,
371     GPUInferencePriority_GPU_PRIORITY_MAX_PRECISION,
372     GPUInferencePriority_GPU_PRIORITY_MIN_LATENCY,
373     GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE
374   };
375   return values;
376 }
377 
EnumNamesGPUInferencePriority()378 inline const char * const *EnumNamesGPUInferencePriority() {
379   static const char * const names[5] = {
380     "GPU_PRIORITY_AUTO",
381     "GPU_PRIORITY_MAX_PRECISION",
382     "GPU_PRIORITY_MIN_LATENCY",
383     "GPU_PRIORITY_MIN_MEMORY_USAGE",
384     nullptr
385   };
386   return names;
387 }
388 
EnumNameGPUInferencePriority(GPUInferencePriority e)389 inline const char *EnumNameGPUInferencePriority(GPUInferencePriority e) {
390   if (flatbuffers::IsOutRange(e, GPUInferencePriority_GPU_PRIORITY_AUTO, GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE)) return "";
391   const size_t index = static_cast<size_t>(e);
392   return EnumNamesGPUInferencePriority()[index];
393 }
394 
395 enum GPUInferenceUsage : int32_t {
396   GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER = 0,
397   GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED = 1,
398   GPUInferenceUsage_MIN = GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
399   GPUInferenceUsage_MAX = GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED
400 };
401 
EnumValuesGPUInferenceUsage()402 inline const GPUInferenceUsage (&EnumValuesGPUInferenceUsage())[2] {
403   static const GPUInferenceUsage values[] = {
404     GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
405     GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED
406   };
407   return values;
408 }
409 
EnumNamesGPUInferenceUsage()410 inline const char * const *EnumNamesGPUInferenceUsage() {
411   static const char * const names[3] = {
412     "GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER",
413     "GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED",
414     nullptr
415   };
416   return names;
417 }
418 
EnumNameGPUInferenceUsage(GPUInferenceUsage e)419 inline const char *EnumNameGPUInferenceUsage(GPUInferenceUsage e) {
420   if (flatbuffers::IsOutRange(e, GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER, GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED)) return "";
421   const size_t index = static_cast<size_t>(e);
422   return EnumNamesGPUInferenceUsage()[index];
423 }
424 
425 enum XNNPackFlags : int32_t {
426   XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS = 0,
427   XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QS8 = 1,
428   XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QU8 = 2,
429   XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QS8_QU8 = 3,
430   XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_FORCE_FP16 = 4,
431   XNNPackFlags_MIN = XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS,
432   XNNPackFlags_MAX = XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_FORCE_FP16
433 };
434 
EnumValuesXNNPackFlags()435 inline const XNNPackFlags (&EnumValuesXNNPackFlags())[5] {
436   static const XNNPackFlags values[] = {
437     XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS,
438     XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QS8,
439     XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QU8,
440     XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_QS8_QU8,
441     XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_FORCE_FP16
442   };
443   return values;
444 }
445 
EnumNamesXNNPackFlags()446 inline const char * const *EnumNamesXNNPackFlags() {
447   static const char * const names[6] = {
448     "TFLITE_XNNPACK_DELEGATE_NO_FLAGS",
449     "TFLITE_XNNPACK_DELEGATE_FLAG_QS8",
450     "TFLITE_XNNPACK_DELEGATE_FLAG_QU8",
451     "TFLITE_XNNPACK_DELEGATE_FLAG_QS8_QU8",
452     "TFLITE_XNNPACK_DELEGATE_FLAG_FORCE_FP16",
453     nullptr
454   };
455   return names;
456 }
457 
EnumNameXNNPackFlags(XNNPackFlags e)458 inline const char *EnumNameXNNPackFlags(XNNPackFlags e) {
459   if (flatbuffers::IsOutRange(e, XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS, XNNPackFlags_TFLITE_XNNPACK_DELEGATE_FLAG_FORCE_FP16)) return "";
460   const size_t index = static_cast<size_t>(e);
461   return EnumNamesXNNPackFlags()[index];
462 }
463 
464 namespace CoreMLSettings_ {
465 
466 enum EnabledDevices : int32_t {
467   EnabledDevices_DEVICES_ALL = 0,
468   EnabledDevices_DEVICES_WITH_NEURAL_ENGINE = 1,
469   EnabledDevices_MIN = EnabledDevices_DEVICES_ALL,
470   EnabledDevices_MAX = EnabledDevices_DEVICES_WITH_NEURAL_ENGINE
471 };
472 
EnumValuesEnabledDevices()473 inline const EnabledDevices (&EnumValuesEnabledDevices())[2] {
474   static const EnabledDevices values[] = {
475     EnabledDevices_DEVICES_ALL,
476     EnabledDevices_DEVICES_WITH_NEURAL_ENGINE
477   };
478   return values;
479 }
480 
EnumNamesEnabledDevices()481 inline const char * const *EnumNamesEnabledDevices() {
482   static const char * const names[3] = {
483     "DEVICES_ALL",
484     "DEVICES_WITH_NEURAL_ENGINE",
485     nullptr
486   };
487   return names;
488 }
489 
EnumNameEnabledDevices(EnabledDevices e)490 inline const char *EnumNameEnabledDevices(EnabledDevices e) {
491   if (flatbuffers::IsOutRange(e, EnabledDevices_DEVICES_ALL, EnabledDevices_DEVICES_WITH_NEURAL_ENGINE)) return "";
492   const size_t index = static_cast<size_t>(e);
493   return EnumNamesEnabledDevices()[index];
494 }
495 
496 }  // namespace CoreMLSettings_
497 
498 namespace EdgeTpuDeviceSpec_ {
499 
500 enum PlatformType : int32_t {
501   PlatformType_MMIO = 0,
502   PlatformType_REFERENCE = 1,
503   PlatformType_SIMULATOR = 2,
504   PlatformType_REMOTE_SIMULATOR = 3,
505   PlatformType_MIN = PlatformType_MMIO,
506   PlatformType_MAX = PlatformType_REMOTE_SIMULATOR
507 };
508 
EnumValuesPlatformType()509 inline const PlatformType (&EnumValuesPlatformType())[4] {
510   static const PlatformType values[] = {
511     PlatformType_MMIO,
512     PlatformType_REFERENCE,
513     PlatformType_SIMULATOR,
514     PlatformType_REMOTE_SIMULATOR
515   };
516   return values;
517 }
518 
EnumNamesPlatformType()519 inline const char * const *EnumNamesPlatformType() {
520   static const char * const names[5] = {
521     "MMIO",
522     "REFERENCE",
523     "SIMULATOR",
524     "REMOTE_SIMULATOR",
525     nullptr
526   };
527   return names;
528 }
529 
EnumNamePlatformType(PlatformType e)530 inline const char *EnumNamePlatformType(PlatformType e) {
531   if (flatbuffers::IsOutRange(e, PlatformType_MMIO, PlatformType_REMOTE_SIMULATOR)) return "";
532   const size_t index = static_cast<size_t>(e);
533   return EnumNamesPlatformType()[index];
534 }
535 
536 }  // namespace EdgeTpuDeviceSpec_
537 
538 enum EdgeTpuPowerState : int32_t {
539   EdgeTpuPowerState_UNDEFINED_POWERSTATE = 0,
540   EdgeTpuPowerState_TPU_CORE_OFF = 1,
541   EdgeTpuPowerState_READY = 2,
542   EdgeTpuPowerState_ACTIVE_MIN_POWER = 3,
543   EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER = 4,
544   EdgeTpuPowerState_ACTIVE_LOW_POWER = 5,
545   EdgeTpuPowerState_ACTIVE = 6,
546   EdgeTpuPowerState_OVER_DRIVE = 7,
547   EdgeTpuPowerState_MIN = EdgeTpuPowerState_UNDEFINED_POWERSTATE,
548   EdgeTpuPowerState_MAX = EdgeTpuPowerState_OVER_DRIVE
549 };
550 
EnumValuesEdgeTpuPowerState()551 inline const EdgeTpuPowerState (&EnumValuesEdgeTpuPowerState())[8] {
552   static const EdgeTpuPowerState values[] = {
553     EdgeTpuPowerState_UNDEFINED_POWERSTATE,
554     EdgeTpuPowerState_TPU_CORE_OFF,
555     EdgeTpuPowerState_READY,
556     EdgeTpuPowerState_ACTIVE_MIN_POWER,
557     EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER,
558     EdgeTpuPowerState_ACTIVE_LOW_POWER,
559     EdgeTpuPowerState_ACTIVE,
560     EdgeTpuPowerState_OVER_DRIVE
561   };
562   return values;
563 }
564 
EnumNamesEdgeTpuPowerState()565 inline const char * const *EnumNamesEdgeTpuPowerState() {
566   static const char * const names[9] = {
567     "UNDEFINED_POWERSTATE",
568     "TPU_CORE_OFF",
569     "READY",
570     "ACTIVE_MIN_POWER",
571     "ACTIVE_VERY_LOW_POWER",
572     "ACTIVE_LOW_POWER",
573     "ACTIVE",
574     "OVER_DRIVE",
575     nullptr
576   };
577   return names;
578 }
579 
EnumNameEdgeTpuPowerState(EdgeTpuPowerState e)580 inline const char *EnumNameEdgeTpuPowerState(EdgeTpuPowerState e) {
581   if (flatbuffers::IsOutRange(e, EdgeTpuPowerState_UNDEFINED_POWERSTATE, EdgeTpuPowerState_OVER_DRIVE)) return "";
582   const size_t index = static_cast<size_t>(e);
583   return EnumNamesEdgeTpuPowerState()[index];
584 }
585 
586 namespace EdgeTpuSettings_ {
587 
588 enum FloatTruncationType : int32_t {
589   FloatTruncationType_UNSPECIFIED = 0,
590   FloatTruncationType_NO_TRUNCATION = 1,
591   FloatTruncationType_BFLOAT16 = 2,
592   FloatTruncationType_HALF = 3,
593   FloatTruncationType_MIN = FloatTruncationType_UNSPECIFIED,
594   FloatTruncationType_MAX = FloatTruncationType_HALF
595 };
596 
EnumValuesFloatTruncationType()597 inline const FloatTruncationType (&EnumValuesFloatTruncationType())[4] {
598   static const FloatTruncationType values[] = {
599     FloatTruncationType_UNSPECIFIED,
600     FloatTruncationType_NO_TRUNCATION,
601     FloatTruncationType_BFLOAT16,
602     FloatTruncationType_HALF
603   };
604   return values;
605 }
606 
EnumNamesFloatTruncationType()607 inline const char * const *EnumNamesFloatTruncationType() {
608   static const char * const names[5] = {
609     "UNSPECIFIED",
610     "NO_TRUNCATION",
611     "BFLOAT16",
612     "HALF",
613     nullptr
614   };
615   return names;
616 }
617 
EnumNameFloatTruncationType(FloatTruncationType e)618 inline const char *EnumNameFloatTruncationType(FloatTruncationType e) {
619   if (flatbuffers::IsOutRange(e, FloatTruncationType_UNSPECIFIED, FloatTruncationType_HALF)) return "";
620   const size_t index = static_cast<size_t>(e);
621   return EnumNamesFloatTruncationType()[index];
622 }
623 
624 enum QosClass : int32_t {
625   QosClass_QOS_UNDEFINED = 0,
626   QosClass_BEST_EFFORT = 1,
627   QosClass_REALTIME = 2,
628   QosClass_MIN = QosClass_QOS_UNDEFINED,
629   QosClass_MAX = QosClass_REALTIME
630 };
631 
EnumValuesQosClass()632 inline const QosClass (&EnumValuesQosClass())[3] {
633   static const QosClass values[] = {
634     QosClass_QOS_UNDEFINED,
635     QosClass_BEST_EFFORT,
636     QosClass_REALTIME
637   };
638   return values;
639 }
640 
EnumNamesQosClass()641 inline const char * const *EnumNamesQosClass() {
642   static const char * const names[4] = {
643     "QOS_UNDEFINED",
644     "BEST_EFFORT",
645     "REALTIME",
646     nullptr
647   };
648   return names;
649 }
650 
EnumNameQosClass(QosClass e)651 inline const char *EnumNameQosClass(QosClass e) {
652   if (flatbuffers::IsOutRange(e, QosClass_QOS_UNDEFINED, QosClass_REALTIME)) return "";
653   const size_t index = static_cast<size_t>(e);
654   return EnumNamesQosClass()[index];
655 }
656 
657 }  // namespace EdgeTpuSettings_
658 
659 namespace CoralSettings_ {
660 
661 enum Performance : int32_t {
662   Performance_UNDEFINED = 0,
663   Performance_MAXIMUM = 1,
664   Performance_HIGH = 2,
665   Performance_MEDIUM = 3,
666   Performance_LOW = 4,
667   Performance_MIN = Performance_UNDEFINED,
668   Performance_MAX = Performance_LOW
669 };
670 
EnumValuesPerformance()671 inline const Performance (&EnumValuesPerformance())[5] {
672   static const Performance values[] = {
673     Performance_UNDEFINED,
674     Performance_MAXIMUM,
675     Performance_HIGH,
676     Performance_MEDIUM,
677     Performance_LOW
678   };
679   return values;
680 }
681 
EnumNamesPerformance()682 inline const char * const *EnumNamesPerformance() {
683   static const char * const names[6] = {
684     "UNDEFINED",
685     "MAXIMUM",
686     "HIGH",
687     "MEDIUM",
688     "LOW",
689     nullptr
690   };
691   return names;
692 }
693 
EnumNamePerformance(Performance e)694 inline const char *EnumNamePerformance(Performance e) {
695   if (flatbuffers::IsOutRange(e, Performance_UNDEFINED, Performance_LOW)) return "";
696   const size_t index = static_cast<size_t>(e);
697   return EnumNamesPerformance()[index];
698 }
699 
700 }  // namespace CoralSettings_
701 
702 enum BenchmarkEventType : int32_t {
703   BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE = 0,
704   BenchmarkEventType_START = 1,
705   BenchmarkEventType_END = 2,
706   BenchmarkEventType_ERROR = 3,
707   BenchmarkEventType_LOGGED = 4,
708   BenchmarkEventType_RECOVERED_ERROR = 5,
709   BenchmarkEventType_MIN = BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
710   BenchmarkEventType_MAX = BenchmarkEventType_RECOVERED_ERROR
711 };
712 
EnumValuesBenchmarkEventType()713 inline const BenchmarkEventType (&EnumValuesBenchmarkEventType())[6] {
714   static const BenchmarkEventType values[] = {
715     BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
716     BenchmarkEventType_START,
717     BenchmarkEventType_END,
718     BenchmarkEventType_ERROR,
719     BenchmarkEventType_LOGGED,
720     BenchmarkEventType_RECOVERED_ERROR
721   };
722   return values;
723 }
724 
EnumNamesBenchmarkEventType()725 inline const char * const *EnumNamesBenchmarkEventType() {
726   static const char * const names[7] = {
727     "UNDEFINED_BENCHMARK_EVENT_TYPE",
728     "START",
729     "END",
730     "ERROR",
731     "LOGGED",
732     "RECOVERED_ERROR",
733     nullptr
734   };
735   return names;
736 }
737 
EnumNameBenchmarkEventType(BenchmarkEventType e)738 inline const char *EnumNameBenchmarkEventType(BenchmarkEventType e) {
739   if (flatbuffers::IsOutRange(e, BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE, BenchmarkEventType_RECOVERED_ERROR)) return "";
740   const size_t index = static_cast<size_t>(e);
741   return EnumNamesBenchmarkEventType()[index];
742 }
743 
744 enum BenchmarkStage : int32_t {
745   BenchmarkStage_UNKNOWN = 0,
746   BenchmarkStage_INITIALIZATION = 1,
747   BenchmarkStage_INFERENCE = 2,
748   BenchmarkStage_MIN = BenchmarkStage_UNKNOWN,
749   BenchmarkStage_MAX = BenchmarkStage_INFERENCE
750 };
751 
EnumValuesBenchmarkStage()752 inline const BenchmarkStage (&EnumValuesBenchmarkStage())[3] {
753   static const BenchmarkStage values[] = {
754     BenchmarkStage_UNKNOWN,
755     BenchmarkStage_INITIALIZATION,
756     BenchmarkStage_INFERENCE
757   };
758   return values;
759 }
760 
EnumNamesBenchmarkStage()761 inline const char * const *EnumNamesBenchmarkStage() {
762   static const char * const names[4] = {
763     "UNKNOWN",
764     "INITIALIZATION",
765     "INFERENCE",
766     nullptr
767   };
768   return names;
769 }
770 
EnumNameBenchmarkStage(BenchmarkStage e)771 inline const char *EnumNameBenchmarkStage(BenchmarkStage e) {
772   if (flatbuffers::IsOutRange(e, BenchmarkStage_UNKNOWN, BenchmarkStage_INFERENCE)) return "";
773   const size_t index = static_cast<size_t>(e);
774   return EnumNamesBenchmarkStage()[index];
775 }
776 
777 struct ComputeSettingsT : public flatbuffers::NativeTable {
778   typedef ComputeSettings TableType;
779   tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY;
780   std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings{};
781   std::string model_namespace_for_statistics{};
782   std::string model_identifier_for_statistics{};
783   std::unique_ptr<tflite::MinibenchmarkSettingsT> settings_to_test_locally{};
784   ComputeSettingsT() = default;
785   ComputeSettingsT(const ComputeSettingsT &o);
786   ComputeSettingsT(ComputeSettingsT&&) FLATBUFFERS_NOEXCEPT = default;
787   ComputeSettingsT &operator=(ComputeSettingsT o) FLATBUFFERS_NOEXCEPT;
788 };
789 
790 struct ComputeSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
791   typedef ComputeSettingsT NativeTableType;
792   typedef ComputeSettingsBuilder Builder;
793   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
794     VT_PREFERENCE = 4,
795     VT_TFLITE_SETTINGS = 6,
796     VT_MODEL_NAMESPACE_FOR_STATISTICS = 8,
797     VT_MODEL_IDENTIFIER_FOR_STATISTICS = 10,
798     VT_SETTINGS_TO_TEST_LOCALLY = 12
799   };
preferenceFLATBUFFERS_FINAL_CLASS800   tflite::ExecutionPreference preference() const {
801     return static_cast<tflite::ExecutionPreference>(GetField<int32_t>(VT_PREFERENCE, 0));
802   }
tflite_settingsFLATBUFFERS_FINAL_CLASS803   const tflite::TFLiteSettings *tflite_settings() const {
804     return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
805   }
model_namespace_for_statisticsFLATBUFFERS_FINAL_CLASS806   const flatbuffers::String *model_namespace_for_statistics() const {
807     return GetPointer<const flatbuffers::String *>(VT_MODEL_NAMESPACE_FOR_STATISTICS);
808   }
model_identifier_for_statisticsFLATBUFFERS_FINAL_CLASS809   const flatbuffers::String *model_identifier_for_statistics() const {
810     return GetPointer<const flatbuffers::String *>(VT_MODEL_IDENTIFIER_FOR_STATISTICS);
811   }
settings_to_test_locallyFLATBUFFERS_FINAL_CLASS812   const tflite::MinibenchmarkSettings *settings_to_test_locally() const {
813     return GetPointer<const tflite::MinibenchmarkSettings *>(VT_SETTINGS_TO_TEST_LOCALLY);
814   }
  // Bounds-checks every field of this table against the enclosing buffer:
  // the scalar by size/alignment, offset fields and their targets (nested
  // tables, strings) recursively. Short-circuits false on the first failure.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_PREFERENCE, 4) &&
           VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
           verifier.VerifyTable(tflite_settings()) &&
           VerifyOffset(verifier, VT_MODEL_NAMESPACE_FOR_STATISTICS) &&
           verifier.VerifyString(model_namespace_for_statistics()) &&
           VerifyOffset(verifier, VT_MODEL_IDENTIFIER_FOR_STATISTICS) &&
           verifier.VerifyString(model_identifier_for_statistics()) &&
           VerifyOffset(verifier, VT_SETTINGS_TO_TEST_LOCALLY) &&
           verifier.VerifyTable(settings_to_test_locally()) &&
           verifier.EndTable();
  }
828   ComputeSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
829   void UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
830   static flatbuffers::Offset<ComputeSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
831 };
832 
// Incremental builder for ComputeSettings tables: the constructor starts the
// table, each add_* writes one field into fbb_, and Finish() closes the table
// and returns its offset. Call Finish() exactly once per builder.
struct ComputeSettingsBuilder {
  typedef ComputeSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Target buffer (not owned).
  flatbuffers::uoffset_t start_;         // Position where this table started.
  void add_preference(tflite::ExecutionPreference preference) {
    // Scalar with default 0: AddElement elides the field if the value equals
    // the default.
    fbb_.AddElement<int32_t>(ComputeSettings::VT_PREFERENCE, static_cast<int32_t>(preference), 0);
  }
  void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
    fbb_.AddOffset(ComputeSettings::VT_TFLITE_SETTINGS, tflite_settings);
  }
  void add_model_namespace_for_statistics(flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics) {
    fbb_.AddOffset(ComputeSettings::VT_MODEL_NAMESPACE_FOR_STATISTICS, model_namespace_for_statistics);
  }
  void add_model_identifier_for_statistics(flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics) {
    fbb_.AddOffset(ComputeSettings::VT_MODEL_IDENTIFIER_FOR_STATISTICS, model_identifier_for_statistics);
  }
  void add_settings_to_test_locally(flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally) {
    fbb_.AddOffset(ComputeSettings::VT_SETTINGS_TO_TEST_LOCALLY, settings_to_test_locally);
  }
  explicit ComputeSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<ComputeSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<ComputeSettings>(end);
    return o;
  }
};
862 
// One-shot factory for a ComputeSettings table. The add_* calls are made in
// the exact order emitted by the FlatBuffers compiler (which sorts fields for
// alignment); do not reorder them, or the serialized layout will change.
inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
    flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
    flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics = 0,
    flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics = 0,
    flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally = 0) {
  ComputeSettingsBuilder builder_(_fbb);
  builder_.add_settings_to_test_locally(settings_to_test_locally);
  builder_.add_model_identifier_for_statistics(model_identifier_for_statistics);
  builder_.add_model_namespace_for_statistics(model_namespace_for_statistics);
  builder_.add_tflite_settings(tflite_settings);
  builder_.add_preference(preference);
  return builder_.Finish();
}
878 
879 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettingsDirect(
880     flatbuffers::FlatBufferBuilder &_fbb,
881     tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
882     flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
883     const char *model_namespace_for_statistics = nullptr,
884     const char *model_identifier_for_statistics = nullptr,
885     flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally = 0) {
886   auto model_namespace_for_statistics__ = model_namespace_for_statistics ? _fbb.CreateString(model_namespace_for_statistics) : 0;
887   auto model_identifier_for_statistics__ = model_identifier_for_statistics ? _fbb.CreateString(model_identifier_for_statistics) : 0;
888   return tflite::CreateComputeSettings(
889       _fbb,
890       preference,
891       tflite_settings,
892       model_namespace_for_statistics__,
893       model_identifier_for_statistics__,
894       settings_to_test_locally);
895 }
896 
897 flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
898 
// Object (native) API form of NNAPISettings: an unpacked, heap-owning mirror
// of the flat table, produced by NNAPISettings::UnPack and consumed by Pack.
struct NNAPISettingsT : public flatbuffers::NativeTable {
  typedef NNAPISettings TableType;
  std::string accelerator_name{};
  std::string cache_directory{};
  std::string model_token{};
  tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED;
  int32_t no_of_nnapi_instances_to_cache = 0;
  std::unique_ptr<tflite::FallbackSettingsT> fallback_settings{};
  bool allow_nnapi_cpu_on_android_10_plus = false;
  tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED;
  bool allow_dynamic_dimensions = false;
  bool allow_fp16_precision_for_fp32 = false;
  bool use_burst_computation = false;
  int64_t support_library_handle = 0;
  // Copy construction/assignment are user-declared (defined out of line)
  // because fallback_settings is a unique_ptr and cannot be member-wise
  // copied; moves remain defaulted.
  NNAPISettingsT() = default;
  NNAPISettingsT(const NNAPISettingsT &o);
  NNAPISettingsT(NNAPISettingsT&&) FLATBUFFERS_NOEXCEPT = default;
  NNAPISettingsT &operator=(NNAPISettingsT o) FLATBUFFERS_NOEXCEPT;
};
918 
// Flat (wire-format) NNAPI delegate settings table. Accessors read directly
// from the underlying buffer: string/table fields return nullptr when absent,
// scalar fields fall back to the schema defaults encoded in each GetField call.
struct NNAPISettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef NNAPISettingsT NativeTableType;
  typedef NNAPISettingsBuilder Builder;
  // VTable byte offsets for each field, in schema declaration order.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_ACCELERATOR_NAME = 4,
    VT_CACHE_DIRECTORY = 6,
    VT_MODEL_TOKEN = 8,
    VT_EXECUTION_PREFERENCE = 10,
    VT_NO_OF_NNAPI_INSTANCES_TO_CACHE = 12,
    VT_FALLBACK_SETTINGS = 14,
    VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS = 16,
    VT_EXECUTION_PRIORITY = 18,
    VT_ALLOW_DYNAMIC_DIMENSIONS = 20,
    VT_ALLOW_FP16_PRECISION_FOR_FP32 = 22,
    VT_USE_BURST_COMPUTATION = 24,
    VT_SUPPORT_LIBRARY_HANDLE = 26
  };
  const flatbuffers::String *accelerator_name() const {
    return GetPointer<const flatbuffers::String *>(VT_ACCELERATOR_NAME);
  }
  const flatbuffers::String *cache_directory() const {
    return GetPointer<const flatbuffers::String *>(VT_CACHE_DIRECTORY);
  }
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  const flatbuffers::NNAPIExecutionPreference execution_preference() const {
    return static_cast<tflite::NNAPIExecutionPreference>(GetField<int32_t>(VT_EXECUTION_PREFERENCE, 0));
  }
  int32_t no_of_nnapi_instances_to_cache() const {
    return GetField<int32_t>(VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, 0);
  }
  const tflite::FallbackSettings *fallback_settings() const {
    return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
  }
  bool allow_nnapi_cpu_on_android_10_plus() const {
    return GetField<uint8_t>(VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, 0) != 0;
  }
  tflite::NNAPIExecutionPriority execution_priority() const {
    return static_cast<tflite::NNAPIExecutionPriority>(GetField<int32_t>(VT_EXECUTION_PRIORITY, 0));
  }
  bool allow_dynamic_dimensions() const {
    return GetField<uint8_t>(VT_ALLOW_DYNAMIC_DIMENSIONS, 0) != 0;
  }
  bool allow_fp16_precision_for_fp32() const {
    return GetField<uint8_t>(VT_ALLOW_FP16_PRECISION_FOR_FP32, 0) != 0;
  }
  bool use_burst_computation() const {
    return GetField<uint8_t>(VT_USE_BURST_COMPUTATION, 0) != 0;
  }
  int64_t support_library_handle() const {
    return GetField<int64_t>(VT_SUPPORT_LIBRARY_HANDLE, 0);
  }
  // Bounds-checks every field (scalars by size/alignment; strings and the
  // nested FallbackSettings table recursively) against the buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_ACCELERATOR_NAME) &&
           verifier.VerifyString(accelerator_name()) &&
           VerifyOffset(verifier, VT_CACHE_DIRECTORY) &&
           verifier.VerifyString(cache_directory()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           VerifyField<int32_t>(verifier, VT_EXECUTION_PREFERENCE, 4) &&
           VerifyField<int32_t>(verifier, VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, 4) &&
           VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
           verifier.VerifyTable(fallback_settings()) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, 1) &&
           VerifyField<int32_t>(verifier, VT_EXECUTION_PRIORITY, 4) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_DYNAMIC_DIMENSIONS, 1) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_FP16_PRECISION_FOR_FP32, 1) &&
           VerifyField<uint8_t>(verifier, VT_USE_BURST_COMPUTATION, 1) &&
           VerifyField<int64_t>(verifier, VT_SUPPORT_LIBRARY_HANDLE, 8) &&
           verifier.EndTable();
  }
  NNAPISettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<NNAPISettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
996 
// Incremental builder for NNAPISettings tables: constructor starts the table,
// each add_* writes one field, Finish() closes it and returns the offset.
// Scalar add_* calls pass the schema default; AddElement skips the field when
// the value equals that default.
struct NNAPISettingsBuilder {
  typedef NNAPISettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Target buffer (not owned).
  flatbuffers::uoffset_t start_;         // Position where this table started.
  void add_accelerator_name(flatbuffers::Offset<flatbuffers::String> accelerator_name) {
    fbb_.AddOffset(NNAPISettings::VT_ACCELERATOR_NAME, accelerator_name);
  }
  void add_cache_directory(flatbuffers::Offset<flatbuffers::String> cache_directory) {
    fbb_.AddOffset(NNAPISettings::VT_CACHE_DIRECTORY, cache_directory);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(NNAPISettings::VT_MODEL_TOKEN, model_token);
  }
  void add_execution_preference(tflite::NNAPIExecutionPreference execution_preference) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PREFERENCE, static_cast<int32_t>(execution_preference), 0);
  }
  void add_no_of_nnapi_instances_to_cache(int32_t no_of_nnapi_instances_to_cache) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, no_of_nnapi_instances_to_cache, 0);
  }
  void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
    fbb_.AddOffset(NNAPISettings::VT_FALLBACK_SETTINGS, fallback_settings);
  }
  void add_allow_nnapi_cpu_on_android_10_plus(bool allow_nnapi_cpu_on_android_10_plus) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, static_cast<uint8_t>(allow_nnapi_cpu_on_android_10_plus), 0);
  }
  void add_execution_priority(tflite::NNAPIExecutionPriority execution_priority) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PRIORITY, static_cast<int32_t>(execution_priority), 0);
  }
  void add_allow_dynamic_dimensions(bool allow_dynamic_dimensions) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_DYNAMIC_DIMENSIONS, static_cast<uint8_t>(allow_dynamic_dimensions), 0);
  }
  void add_allow_fp16_precision_for_fp32(bool allow_fp16_precision_for_fp32) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_FP16_PRECISION_FOR_FP32, static_cast<uint8_t>(allow_fp16_precision_for_fp32), 0);
  }
  void add_use_burst_computation(bool use_burst_computation) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_USE_BURST_COMPUTATION, static_cast<uint8_t>(use_burst_computation), 0);
  }
  void add_support_library_handle(int64_t support_library_handle) {
    fbb_.AddElement<int64_t>(NNAPISettings::VT_SUPPORT_LIBRARY_HANDLE, support_library_handle, 0);
  }
  explicit NNAPISettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<NNAPISettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<NNAPISettings>(end);
    return o;
  }
};
1047 
// One-shot factory for an NNAPISettings table. Fields are added in decreasing
// size order (the int64 handle first, then 4-byte scalars/offsets, then the
// 1-byte bools) exactly as the FlatBuffers compiler emits; do not reorder the
// add_* calls, or the serialized layout will change.
inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> accelerator_name = 0,
    flatbuffers::Offset<flatbuffers::String> cache_directory = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0,
    tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
    int32_t no_of_nnapi_instances_to_cache = 0,
    flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
    bool allow_nnapi_cpu_on_android_10_plus = false,
    tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
    bool allow_dynamic_dimensions = false,
    bool allow_fp16_precision_for_fp32 = false,
    bool use_burst_computation = false,
    int64_t support_library_handle = 0) {
  NNAPISettingsBuilder builder_(_fbb);
  builder_.add_support_library_handle(support_library_handle);
  builder_.add_execution_priority(execution_priority);
  builder_.add_fallback_settings(fallback_settings);
  builder_.add_no_of_nnapi_instances_to_cache(no_of_nnapi_instances_to_cache);
  builder_.add_execution_preference(execution_preference);
  builder_.add_model_token(model_token);
  builder_.add_cache_directory(cache_directory);
  builder_.add_accelerator_name(accelerator_name);
  builder_.add_use_burst_computation(use_burst_computation);
  builder_.add_allow_fp16_precision_for_fp32(allow_fp16_precision_for_fp32);
  builder_.add_allow_dynamic_dimensions(allow_dynamic_dimensions);
  builder_.add_allow_nnapi_cpu_on_android_10_plus(allow_nnapi_cpu_on_android_10_plus);
  return builder_.Finish();
}
1077 
1078 inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettingsDirect(
1079     flatbuffers::FlatBufferBuilder &_fbb,
1080     const char *accelerator_name = nullptr,
1081     const char *cache_directory = nullptr,
1082     const char *model_token = nullptr,
1083     tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
1084     int32_t no_of_nnapi_instances_to_cache = 0,
1085     flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
1086     bool allow_nnapi_cpu_on_android_10_plus = false,
1087     tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
1088     bool allow_dynamic_dimensions = false,
1089     bool allow_fp16_precision_for_fp32 = false,
1090     bool use_burst_computation = false,
1091     int64_t support_library_handle = 0) {
1092   auto accelerator_name__ = accelerator_name ? _fbb.CreateString(accelerator_name) : 0;
1093   auto cache_directory__ = cache_directory ? _fbb.CreateString(cache_directory) : 0;
1094   auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
1095   return tflite::CreateNNAPISettings(
1096       _fbb,
1097       accelerator_name__,
1098       cache_directory__,
1099       model_token__,
1100       execution_preference,
1101       no_of_nnapi_instances_to_cache,
1102       fallback_settings,
1103       allow_nnapi_cpu_on_android_10_plus,
1104       execution_priority,
1105       allow_dynamic_dimensions,
1106       allow_fp16_precision_for_fp32,
1107       use_burst_computation,
1108       support_library_handle);
1109 }
1110 
1111 flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1112 
// Object (native) API form of GPUSettings: an unpacked mirror of the flat
// table with the schema defaults as in-class initializers. Owns no nested
// tables, so the Rule of Zero applies (all special members defaulted).
struct GPUSettingsT : public flatbuffers::NativeTable {
  typedef GPUSettings TableType;
  bool is_precision_loss_allowed = false;
  bool enable_quantized_inference = true;
  tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET;
  tflite::GPUInferencePriority inference_priority1 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO;
  tflite::GPUInferencePriority inference_priority2 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO;
  tflite::GPUInferencePriority inference_priority3 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO;
  tflite::GPUInferenceUsage inference_preference = tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER;
  std::string cache_directory{};
  std::string model_token{};
};
1125 
// Flat (wire-format) GPU delegate settings table. Accessors read directly
// from the buffer; string fields return nullptr when absent, scalars fall
// back to the defaults encoded in each GetField call (note
// enable_quantized_inference defaults to 1, i.e. true).
struct GPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef GPUSettingsT NativeTableType;
  typedef GPUSettingsBuilder Builder;
  // VTable byte offsets for each field, in schema declaration order.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_IS_PRECISION_LOSS_ALLOWED = 4,
    VT_ENABLE_QUANTIZED_INFERENCE = 6,
    VT_FORCE_BACKEND = 8,
    VT_INFERENCE_PRIORITY1 = 10,
    VT_INFERENCE_PRIORITY2 = 12,
    VT_INFERENCE_PRIORITY3 = 14,
    VT_INFERENCE_PREFERENCE = 16,
    VT_CACHE_DIRECTORY = 18,
    VT_MODEL_TOKEN = 20
  };
  bool is_precision_loss_allowed() const {
    return GetField<uint8_t>(VT_IS_PRECISION_LOSS_ALLOWED, 0) != 0;
  }
  bool enable_quantized_inference() const {
    return GetField<uint8_t>(VT_ENABLE_QUANTIZED_INFERENCE, 1) != 0;
  }
  tflite::GPUBackend force_backend() const {
    return static_cast<tflite::GPUBackend>(GetField<int32_t>(VT_FORCE_BACKEND, 0));
  }
  tflite::GPUInferencePriority inference_priority1() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY1, 0));
  }
  tflite::GPUInferencePriority inference_priority2() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY2, 0));
  }
  tflite::GPUInferencePriority inference_priority3() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY3, 0));
  }
  tflite::GPUInferenceUsage inference_preference() const {
    return static_cast<tflite::GPUInferenceUsage>(GetField<int32_t>(VT_INFERENCE_PREFERENCE, 0));
  }
  const flatbuffers::String *cache_directory() const {
    return GetPointer<const flatbuffers::String *>(VT_CACHE_DIRECTORY);
  }
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  // Bounds-checks every field (scalars by size/alignment, strings via their
  // offsets) against the buffer; false on the first failure.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_IS_PRECISION_LOSS_ALLOWED, 1) &&
           VerifyField<uint8_t>(verifier, VT_ENABLE_QUANTIZED_INFERENCE, 1) &&
           VerifyField<int32_t>(verifier, VT_FORCE_BACKEND, 4) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY1, 4) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY2, 4) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY3, 4) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PREFERENCE, 4) &&
           VerifyOffset(verifier, VT_CACHE_DIRECTORY) &&
           verifier.VerifyString(cache_directory()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           verifier.EndTable();
  }
  GPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<GPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1186 
// Incremental builder for GPUSettings tables: constructor starts the table,
// each add_* writes one field, Finish() closes it and returns the offset.
// Scalar add_* calls pass the schema default (note enable_quantized_inference
// defaults to 1); AddElement skips the field when the value equals it.
struct GPUSettingsBuilder {
  typedef GPUSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Target buffer (not owned).
  flatbuffers::uoffset_t start_;         // Position where this table started.
  void add_is_precision_loss_allowed(bool is_precision_loss_allowed) {
    fbb_.AddElement<uint8_t>(GPUSettings::VT_IS_PRECISION_LOSS_ALLOWED, static_cast<uint8_t>(is_precision_loss_allowed), 0);
  }
  void add_enable_quantized_inference(bool enable_quantized_inference) {
    fbb_.AddElement<uint8_t>(GPUSettings::VT_ENABLE_QUANTIZED_INFERENCE, static_cast<uint8_t>(enable_quantized_inference), 1);
  }
  void add_force_backend(tflite::GPUBackend force_backend) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_FORCE_BACKEND, static_cast<int32_t>(force_backend), 0);
  }
  void add_inference_priority1(tflite::GPUInferencePriority inference_priority1) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY1, static_cast<int32_t>(inference_priority1), 0);
  }
  void add_inference_priority2(tflite::GPUInferencePriority inference_priority2) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY2, static_cast<int32_t>(inference_priority2), 0);
  }
  void add_inference_priority3(tflite::GPUInferencePriority inference_priority3) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY3, static_cast<int32_t>(inference_priority3), 0);
  }
  void add_inference_preference(tflite::GPUInferenceUsage inference_preference) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PREFERENCE, static_cast<int32_t>(inference_preference), 0);
  }
  void add_cache_directory(flatbuffers::Offset<flatbuffers::String> cache_directory) {
    fbb_.AddOffset(GPUSettings::VT_CACHE_DIRECTORY, cache_directory);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(GPUSettings::VT_MODEL_TOKEN, model_token);
  }
  explicit GPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<GPUSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<GPUSettings>(end);
    return o;
  }
};
1228 
// One-shot factory for a GPUSettings table. The add_* calls are made in the
// exact order emitted by the FlatBuffers compiler (4-byte offsets/scalars
// first, 1-byte bools last); do not reorder them, or the serialized layout
// will change.
inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    bool is_precision_loss_allowed = false,
    bool enable_quantized_inference = true,
    tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET,
    tflite::GPUInferencePriority inference_priority1 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority2 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority3 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferenceUsage inference_preference = tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
    flatbuffers::Offset<flatbuffers::String> cache_directory = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0) {
  GPUSettingsBuilder builder_(_fbb);
  builder_.add_model_token(model_token);
  builder_.add_cache_directory(cache_directory);
  builder_.add_inference_preference(inference_preference);
  builder_.add_inference_priority3(inference_priority3);
  builder_.add_inference_priority2(inference_priority2);
  builder_.add_inference_priority1(inference_priority1);
  builder_.add_force_backend(force_backend);
  builder_.add_enable_quantized_inference(enable_quantized_inference);
  builder_.add_is_precision_loss_allowed(is_precision_loss_allowed);
  return builder_.Finish();
}
1252 
1253 inline flatbuffers::Offset<GPUSettings> CreateGPUSettingsDirect(
1254     flatbuffers::FlatBufferBuilder &_fbb,
1255     bool is_precision_loss_allowed = false,
1256     bool enable_quantized_inference = true,
1257     tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET,
1258     tflite::GPUInferencePriority inference_priority1 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
1259     tflite::GPUInferencePriority inference_priority2 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
1260     tflite::GPUInferencePriority inference_priority3 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
1261     tflite::GPUInferenceUsage inference_preference = tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
1262     const char *cache_directory = nullptr,
1263     const char *model_token = nullptr) {
1264   auto cache_directory__ = cache_directory ? _fbb.CreateString(cache_directory) : 0;
1265   auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
1266   return tflite::CreateGPUSettings(
1267       _fbb,
1268       is_precision_loss_allowed,
1269       enable_quantized_inference,
1270       force_backend,
1271       inference_priority1,
1272       inference_priority2,
1273       inference_priority3,
1274       inference_preference,
1275       cache_directory__,
1276       model_token__);
1277 }
1278 
1279 flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1280 
// Object (native) API form of HexagonSettings: an unpacked mirror of the
// flat table, all fields scalar with schema defaults of 0/false.
struct HexagonSettingsT : public flatbuffers::NativeTable {
  typedef HexagonSettings TableType;
  int32_t debug_level = 0;
  int32_t powersave_level = 0;
  bool print_graph_profile = false;
  bool print_graph_debug = false;
};
1288 
// Flat (wire-format) Hexagon delegate settings table. All four fields are
// scalars that default to 0/false when absent from the buffer.
struct HexagonSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef HexagonSettingsT NativeTableType;
  typedef HexagonSettingsBuilder Builder;
  // VTable byte offsets for each field, in schema declaration order.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DEBUG_LEVEL = 4,
    VT_POWERSAVE_LEVEL = 6,
    VT_PRINT_GRAPH_PROFILE = 8,
    VT_PRINT_GRAPH_DEBUG = 10
  };
  int32_t debug_level() const {
    return GetField<int32_t>(VT_DEBUG_LEVEL, 0);
  }
  int32_t powersave_level() const {
    return GetField<int32_t>(VT_POWERSAVE_LEVEL, 0);
  }
  bool print_graph_profile() const {
    return GetField<uint8_t>(VT_PRINT_GRAPH_PROFILE, 0) != 0;
  }
  bool print_graph_debug() const {
    return GetField<uint8_t>(VT_PRINT_GRAPH_DEBUG, 0) != 0;
  }
  // Bounds-checks each scalar field (by size/alignment) against the buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_DEBUG_LEVEL, 4) &&
           VerifyField<int32_t>(verifier, VT_POWERSAVE_LEVEL, 4) &&
           VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_PROFILE, 1) &&
           VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_DEBUG, 1) &&
           verifier.EndTable();
  }
  HexagonSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<HexagonSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1322 
// Incremental builder for HexagonSettings tables: constructor starts the
// table, each add_* writes one scalar field (skipped when equal to the
// default 0), Finish() closes the table and returns its offset.
struct HexagonSettingsBuilder {
  typedef HexagonSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Target buffer (not owned).
  flatbuffers::uoffset_t start_;         // Position where this table started.
  void add_debug_level(int32_t debug_level) {
    fbb_.AddElement<int32_t>(HexagonSettings::VT_DEBUG_LEVEL, debug_level, 0);
  }
  void add_powersave_level(int32_t powersave_level) {
    fbb_.AddElement<int32_t>(HexagonSettings::VT_POWERSAVE_LEVEL, powersave_level, 0);
  }
  void add_print_graph_profile(bool print_graph_profile) {
    fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_PROFILE, static_cast<uint8_t>(print_graph_profile), 0);
  }
  void add_print_graph_debug(bool print_graph_debug) {
    fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_DEBUG, static_cast<uint8_t>(print_graph_debug), 0);
  }
  explicit HexagonSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<HexagonSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<HexagonSettings>(end);
    return o;
  }
};
1349 
// One-shot factory for a HexagonSettings table. The add_* calls follow the
// compiler-emitted order (4-byte ints first, 1-byte bools last); do not
// reorder them, or the serialized layout will change.
inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t debug_level = 0,
    int32_t powersave_level = 0,
    bool print_graph_profile = false,
    bool print_graph_debug = false) {
  HexagonSettingsBuilder builder_(_fbb);
  builder_.add_powersave_level(powersave_level);
  builder_.add_debug_level(debug_level);
  builder_.add_print_graph_debug(print_graph_debug);
  builder_.add_print_graph_profile(print_graph_profile);
  return builder_.Finish();
}
1363 
1364 flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1365 
// Native (object API) mirror of the XNNPackSettings table; produced by
// XNNPackSettings::UnPack and consumed by Pack.
struct XNNPackSettingsT : public flatbuffers::NativeTable {
  typedef XNNPackSettings TableType;
  int32_t num_threads = 0;
  tflite::XNNPackFlags flags = tflite::XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS;
};
1371 
// Flat (zero-copy) accessor for an XNNPackSettings table inside a buffer.
struct XNNPackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef XNNPackSettingsT NativeTableType;
  typedef XNNPackSettingsBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUM_THREADS = 4,
    VT_FLAGS = 6
  };
  int32_t num_threads() const {
    return GetField<int32_t>(VT_NUM_THREADS, 0);
  }
  tflite::XNNPackFlags flags() const {
    // Enum is serialized as its underlying int32_t.
    return static_cast<tflite::XNNPackFlags>(GetField<int32_t>(VT_FLAGS, 0));
  }
  // Structural validation of this table against buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUM_THREADS, 4) &&
           VerifyField<int32_t>(verifier, VT_FLAGS, 4) &&
           verifier.EndTable();
  }
  XNNPackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<XNNPackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1395 
// Incrementally writes an XNNPackSettings table into an in-progress buffer.
struct XNNPackSettingsBuilder {
  typedef XNNPackSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  void add_num_threads(int32_t num_threads) {
    fbb_.AddElement<int32_t>(XNNPackSettings::VT_NUM_THREADS, num_threads, 0);
  }
  void add_flags(tflite::XNNPackFlags flags) {
    fbb_.AddElement<int32_t>(XNNPackSettings::VT_FLAGS, static_cast<int32_t>(flags), 0);
  }
  explicit XNNPackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<XNNPackSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<XNNPackSettings>(end);
    return o;
  }
};
1416 
// Convenience one-shot constructor for an XNNPackSettings table.
inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t num_threads = 0,
    tflite::XNNPackFlags flags = tflite::XNNPackFlags_TFLITE_XNNPACK_DELEGATE_NO_FLAGS) {
  XNNPackSettingsBuilder builder_(_fbb);
  builder_.add_flags(flags);
  builder_.add_num_threads(num_threads);
  return builder_.Finish();
}
1426 
1427 flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1428 
// Native (object API) mirror of the CoreMLSettings table.
struct CoreMLSettingsT : public flatbuffers::NativeTable {
  typedef CoreMLSettings TableType;
  tflite::CoreMLSettings_::EnabledDevices enabled_devices = tflite::CoreMLSettings_::EnabledDevices_DEVICES_ALL;
  int32_t coreml_version = 0;
  int32_t max_delegated_partitions = 0;
  // Schema default is 2 (not 0), matching the flat accessor below.
  int32_t min_nodes_per_partition = 2;
};
1436 
// Flat (zero-copy) accessor for a CoreMLSettings table inside a buffer.
struct CoreMLSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef CoreMLSettingsT NativeTableType;
  typedef CoreMLSettingsBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_ENABLED_DEVICES = 4,
    VT_COREML_VERSION = 6,
    VT_MAX_DELEGATED_PARTITIONS = 8,
    VT_MIN_NODES_PER_PARTITION = 10
  };
  tflite::CoreMLSettings_::EnabledDevices enabled_devices() const {
    return static_cast<tflite::CoreMLSettings_::EnabledDevices>(GetField<int32_t>(VT_ENABLED_DEVICES, 0));
  }
  int32_t coreml_version() const {
    return GetField<int32_t>(VT_COREML_VERSION, 0);
  }
  int32_t max_delegated_partitions() const {
    return GetField<int32_t>(VT_MAX_DELEGATED_PARTITIONS, 0);
  }
  int32_t min_nodes_per_partition() const {
    // Non-zero schema default: absent field reads as 2.
    return GetField<int32_t>(VT_MIN_NODES_PER_PARTITION, 2);
  }
  // Structural validation of this table against buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_ENABLED_DEVICES, 4) &&
           VerifyField<int32_t>(verifier, VT_COREML_VERSION, 4) &&
           VerifyField<int32_t>(verifier, VT_MAX_DELEGATED_PARTITIONS, 4) &&
           VerifyField<int32_t>(verifier, VT_MIN_NODES_PER_PARTITION, 4) &&
           verifier.EndTable();
  }
  CoreMLSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(CoreMLSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<CoreMLSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoreMLSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1470 
// Incrementally writes a CoreMLSettings table into an in-progress buffer.
struct CoreMLSettingsBuilder {
  typedef CoreMLSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  void add_enabled_devices(tflite::CoreMLSettings_::EnabledDevices enabled_devices) {
    fbb_.AddElement<int32_t>(CoreMLSettings::VT_ENABLED_DEVICES, static_cast<int32_t>(enabled_devices), 0);
  }
  void add_coreml_version(int32_t coreml_version) {
    fbb_.AddElement<int32_t>(CoreMLSettings::VT_COREML_VERSION, coreml_version, 0);
  }
  void add_max_delegated_partitions(int32_t max_delegated_partitions) {
    fbb_.AddElement<int32_t>(CoreMLSettings::VT_MAX_DELEGATED_PARTITIONS, max_delegated_partitions, 0);
  }
  void add_min_nodes_per_partition(int32_t min_nodes_per_partition) {
    // Default is 2, so a value of 2 is omitted from the buffer.
    fbb_.AddElement<int32_t>(CoreMLSettings::VT_MIN_NODES_PER_PARTITION, min_nodes_per_partition, 2);
  }
  explicit CoreMLSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<CoreMLSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<CoreMLSettings>(end);
    return o;
  }
};
1497 
// Convenience one-shot constructor for a CoreMLSettings table.
inline flatbuffers::Offset<CoreMLSettings> CreateCoreMLSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::CoreMLSettings_::EnabledDevices enabled_devices = tflite::CoreMLSettings_::EnabledDevices_DEVICES_ALL,
    int32_t coreml_version = 0,
    int32_t max_delegated_partitions = 0,
    int32_t min_nodes_per_partition = 2) {
  CoreMLSettingsBuilder builder_(_fbb);
  builder_.add_min_nodes_per_partition(min_nodes_per_partition);
  builder_.add_max_delegated_partitions(max_delegated_partitions);
  builder_.add_coreml_version(coreml_version);
  builder_.add_enabled_devices(enabled_devices);
  return builder_.Finish();
}
1511 
1512 flatbuffers::Offset<CoreMLSettings> CreateCoreMLSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoreMLSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1513 
// Native (object API) mirror of the EdgeTpuDeviceSpec table.
struct EdgeTpuDeviceSpecT : public flatbuffers::NativeTable {
  typedef EdgeTpuDeviceSpec TableType;
  tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO;
  int32_t num_chips = 0;
  std::vector<std::string> device_paths{};
  int32_t chip_family = 0;
};
1521 
// Flat (zero-copy) accessor for an EdgeTpuDeviceSpec table inside a buffer.
struct EdgeTpuDeviceSpec FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuDeviceSpecT NativeTableType;
  typedef EdgeTpuDeviceSpecBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_PLATFORM_TYPE = 4,
    VT_NUM_CHIPS = 6,
    VT_DEVICE_PATHS = 8,
    VT_CHIP_FAMILY = 10
  };
  tflite::EdgeTpuDeviceSpec_::PlatformType platform_type() const {
    return static_cast<tflite::EdgeTpuDeviceSpec_::PlatformType>(GetField<int32_t>(VT_PLATFORM_TYPE, 0));
  }
  int32_t num_chips() const {
    return GetField<int32_t>(VT_NUM_CHIPS, 0);
  }
  // May return nullptr when the field is absent.
  const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *device_paths() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DEVICE_PATHS);
  }
  int32_t chip_family() const {
    return GetField<int32_t>(VT_CHIP_FAMILY, 0);
  }
  // Structural validation, including the nested vector of strings.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_PLATFORM_TYPE, 4) &&
           VerifyField<int32_t>(verifier, VT_NUM_CHIPS, 4) &&
           VerifyOffset(verifier, VT_DEVICE_PATHS) &&
           verifier.VerifyVector(device_paths()) &&
           verifier.VerifyVectorOfStrings(device_paths()) &&
           VerifyField<int32_t>(verifier, VT_CHIP_FAMILY, 4) &&
           verifier.EndTable();
  }
  EdgeTpuDeviceSpecT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuDeviceSpec> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1557 
// Incrementally writes an EdgeTpuDeviceSpec table into an in-progress buffer.
struct EdgeTpuDeviceSpecBuilder {
  typedef EdgeTpuDeviceSpec Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  void add_platform_type(tflite::EdgeTpuDeviceSpec_::PlatformType platform_type) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_PLATFORM_TYPE, static_cast<int32_t>(platform_type), 0);
  }
  void add_num_chips(int32_t num_chips) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_NUM_CHIPS, num_chips, 0);
  }
  // The vector must already have been serialized into the same builder.
  void add_device_paths(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths) {
    fbb_.AddOffset(EdgeTpuDeviceSpec::VT_DEVICE_PATHS, device_paths);
  }
  void add_chip_family(int32_t chip_family) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_CHIP_FAMILY, chip_family, 0);
  }
  explicit EdgeTpuDeviceSpecBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<EdgeTpuDeviceSpec> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuDeviceSpec>(end);
    return o;
  }
};
1584 
// Convenience one-shot constructor; `device_paths` must be an offset to a
// vector already serialized into `_fbb` (0 means "field absent").
inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
    int32_t num_chips = 0,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths = 0,
    int32_t chip_family = 0) {
  EdgeTpuDeviceSpecBuilder builder_(_fbb);
  builder_.add_chip_family(chip_family);
  builder_.add_device_paths(device_paths);
  builder_.add_num_chips(num_chips);
  builder_.add_platform_type(platform_type);
  return builder_.Finish();
}
1598 
// "Direct" variant: accepts a plain std::vector of string offsets and
// serializes it into the buffer before delegating to CreateEdgeTpuDeviceSpec.
inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpecDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
    int32_t num_chips = 0,
    const std::vector<flatbuffers::Offset<flatbuffers::String>> *device_paths = nullptr,
    int32_t chip_family = 0) {
  // nullptr maps to offset 0, i.e. the field is left out entirely.
  auto device_paths__ = device_paths ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*device_paths) : 0;
  return tflite::CreateEdgeTpuDeviceSpec(
      _fbb,
      platform_type,
      num_chips,
      device_paths__,
      chip_family);
}
1613 
1614 flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1615 
// Native (object API) mirror of the EdgeTpuInactivePowerConfig table.
struct EdgeTpuInactivePowerConfigT : public flatbuffers::NativeTable {
  typedef EdgeTpuInactivePowerConfig TableType;
  tflite::EdgeTpuPowerState inactive_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE;
  int64_t inactive_timeout_us = 0;
};
1621 
// Flat (zero-copy) accessor for an EdgeTpuInactivePowerConfig table.
struct EdgeTpuInactivePowerConfig FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuInactivePowerConfigT NativeTableType;
  typedef EdgeTpuInactivePowerConfigBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INACTIVE_POWER_STATE = 4,
    VT_INACTIVE_TIMEOUT_US = 6
  };
  tflite::EdgeTpuPowerState inactive_power_state() const {
    return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INACTIVE_POWER_STATE, 0));
  }
  int64_t inactive_timeout_us() const {
    return GetField<int64_t>(VT_INACTIVE_TIMEOUT_US, 0);
  }
  // Structural validation of this table against buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INACTIVE_POWER_STATE, 4) &&
           VerifyField<int64_t>(verifier, VT_INACTIVE_TIMEOUT_US, 8) &&
           verifier.EndTable();
  }
  EdgeTpuInactivePowerConfigT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuInactivePowerConfig> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1645 
// Incrementally writes an EdgeTpuInactivePowerConfig table into a buffer.
struct EdgeTpuInactivePowerConfigBuilder {
  typedef EdgeTpuInactivePowerConfig Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  void add_inactive_power_state(tflite::EdgeTpuPowerState inactive_power_state) {
    fbb_.AddElement<int32_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_POWER_STATE, static_cast<int32_t>(inactive_power_state), 0);
  }
  void add_inactive_timeout_us(int64_t inactive_timeout_us) {
    fbb_.AddElement<int64_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_TIMEOUT_US, inactive_timeout_us, 0);
  }
  explicit EdgeTpuInactivePowerConfigBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<EdgeTpuInactivePowerConfig> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuInactivePowerConfig>(end);
    return o;
  }
};
1666 
// Convenience one-shot constructor for an EdgeTpuInactivePowerConfig table.
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inactive_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    int64_t inactive_timeout_us = 0) {
  EdgeTpuInactivePowerConfigBuilder builder_(_fbb);
  // 8-byte field added first, then the 4-byte enum (generator size order).
  builder_.add_inactive_timeout_us(inactive_timeout_us);
  builder_.add_inactive_power_state(inactive_power_state);
  return builder_.Finish();
}
1676 
1677 flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1678 
// Native (object API) mirror of the EdgeTpuSettings table. Owns its nested
// tables via unique_ptr, so the generator declares explicit copy/move
// members (deep-copy semantics implemented in the generated .cc).
struct EdgeTpuSettingsT : public flatbuffers::NativeTable {
  typedef EdgeTpuSettings TableType;
  tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE;
  std::vector<std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>> inactive_power_configs{};
  int32_t inference_priority = -1;
  std::unique_ptr<tflite::EdgeTpuDeviceSpecT> edgetpu_device_spec{};
  std::string model_token{};
  tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type = tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED;
  tflite::EdgeTpuSettings_::QosClass qos_class = tflite::EdgeTpuSettings_::QosClass_QOS_UNDEFINED;
  EdgeTpuSettingsT() = default;
  EdgeTpuSettingsT(const EdgeTpuSettingsT &o);
  EdgeTpuSettingsT(EdgeTpuSettingsT&&) FLATBUFFERS_NOEXCEPT = default;
  // Copy-and-swap style: takes by value, covers both copy- and move-assign.
  EdgeTpuSettingsT &operator=(EdgeTpuSettingsT o) FLATBUFFERS_NOEXCEPT;
};
1693 
// Flat (zero-copy) accessor for an EdgeTpuSettings table inside a buffer.
struct EdgeTpuSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuSettingsT NativeTableType;
  typedef EdgeTpuSettingsBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INFERENCE_POWER_STATE = 4,
    VT_INACTIVE_POWER_CONFIGS = 6,
    VT_INFERENCE_PRIORITY = 8,
    VT_EDGETPU_DEVICE_SPEC = 10,
    VT_MODEL_TOKEN = 12,
    VT_FLOAT_TRUNCATION_TYPE = 14,
    VT_QOS_CLASS = 16
  };
  tflite::EdgeTpuPowerState inference_power_state() const {
    return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INFERENCE_POWER_STATE, 0));
  }
  // May return nullptr when the field is absent.
  const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *>(VT_INACTIVE_POWER_CONFIGS);
  }
  int32_t inference_priority() const {
    // Non-zero schema default: absent field reads as -1.
    return GetField<int32_t>(VT_INFERENCE_PRIORITY, -1);
  }
  // May return nullptr when the field is absent.
  const tflite::EdgeTpuDeviceSpec *edgetpu_device_spec() const {
    return GetPointer<const tflite::EdgeTpuDeviceSpec *>(VT_EDGETPU_DEVICE_SPEC);
  }
  // May return nullptr when the field is absent.
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type() const {
    return static_cast<tflite::EdgeTpuSettings_::FloatTruncationType>(GetField<int32_t>(VT_FLOAT_TRUNCATION_TYPE, 0));
  }
  tflite::EdgeTpuSettings_::QosClass qos_class() const {
    return static_cast<tflite::EdgeTpuSettings_::QosClass>(GetField<int32_t>(VT_QOS_CLASS, 0));
  }
  // Structural validation, recursing into nested vector/table/string fields.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_POWER_STATE, 4) &&
           VerifyOffset(verifier, VT_INACTIVE_POWER_CONFIGS) &&
           verifier.VerifyVector(inactive_power_configs()) &&
           verifier.VerifyVectorOfTables(inactive_power_configs()) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY, 4) &&
           VerifyOffset(verifier, VT_EDGETPU_DEVICE_SPEC) &&
           verifier.VerifyTable(edgetpu_device_spec()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           VerifyField<int32_t>(verifier, VT_FLOAT_TRUNCATION_TYPE, 4) &&
           VerifyField<int32_t>(verifier, VT_QOS_CLASS, 4) &&
           verifier.EndTable();
  }
  EdgeTpuSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1746 
// Incrementally writes an EdgeTpuSettings table into an in-progress buffer.
// Offset-typed fields must already be serialized into the same builder.
struct EdgeTpuSettingsBuilder {
  typedef EdgeTpuSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  void add_inference_power_state(tflite::EdgeTpuPowerState inference_power_state) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_POWER_STATE, static_cast<int32_t>(inference_power_state), 0);
  }
  void add_inactive_power_configs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs) {
    fbb_.AddOffset(EdgeTpuSettings::VT_INACTIVE_POWER_CONFIGS, inactive_power_configs);
  }
  void add_inference_priority(int32_t inference_priority) {
    // Default is -1, so -1 is omitted from the buffer.
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_PRIORITY, inference_priority, -1);
  }
  void add_edgetpu_device_spec(flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec) {
    fbb_.AddOffset(EdgeTpuSettings::VT_EDGETPU_DEVICE_SPEC, edgetpu_device_spec);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(EdgeTpuSettings::VT_MODEL_TOKEN, model_token);
  }
  void add_float_truncation_type(tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_FLOAT_TRUNCATION_TYPE, static_cast<int32_t>(float_truncation_type), 0);
  }
  void add_qos_class(tflite::EdgeTpuSettings_::QosClass qos_class) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_QOS_CLASS, static_cast<int32_t>(qos_class), 0);
  }
  explicit EdgeTpuSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<EdgeTpuSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuSettings>(end);
    return o;
  }
};
1782 
// Convenience one-shot constructor; offset parameters must reference data
// already serialized into `_fbb` (0 means "field absent").
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs = 0,
    int32_t inference_priority = -1,
    flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0,
    tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type = tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED,
    tflite::EdgeTpuSettings_::QosClass qos_class = tflite::EdgeTpuSettings_::QosClass_QOS_UNDEFINED) {
  EdgeTpuSettingsBuilder builder_(_fbb);
  builder_.add_qos_class(qos_class);
  builder_.add_float_truncation_type(float_truncation_type);
  builder_.add_model_token(model_token);
  builder_.add_edgetpu_device_spec(edgetpu_device_spec);
  builder_.add_inference_priority(inference_priority);
  builder_.add_inactive_power_configs(inactive_power_configs);
  builder_.add_inference_power_state(inference_power_state);
  return builder_.Finish();
}
1802 
// "Direct" variant: serializes the C++-level vector and C-string arguments
// into the buffer first, then delegates to CreateEdgeTpuSettings.
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    const std::vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs = nullptr,
    int32_t inference_priority = -1,
    flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0,
    const char *model_token = nullptr,
    tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type = tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED,
    tflite::EdgeTpuSettings_::QosClass qos_class = tflite::EdgeTpuSettings_::QosClass_QOS_UNDEFINED) {
  // nullptr maps to offset 0, i.e. the field is left out entirely.
  auto inactive_power_configs__ = inactive_power_configs ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>(*inactive_power_configs) : 0;
  auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
  return tflite::CreateEdgeTpuSettings(
      _fbb,
      inference_power_state,
      inactive_power_configs__,
      inference_priority,
      edgetpu_device_spec,
      model_token__,
      float_truncation_type,
      qos_class);
}
1824 
1825 flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1826 
// Native (object API) mirror of the CoralSettings table.
struct CoralSettingsT : public flatbuffers::NativeTable {
  typedef CoralSettings TableType;
  std::string device{};
  tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED;
  bool usb_always_dfu = false;
  int32_t usb_max_bulk_in_queue_length = 0;
};
1834 
// Flat (zero-copy) accessor for a CoralSettings table inside a buffer.
struct CoralSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef CoralSettingsT NativeTableType;
  typedef CoralSettingsBuilder Builder;
  // Byte offsets of each field slot in the vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DEVICE = 4,
    VT_PERFORMANCE = 6,
    VT_USB_ALWAYS_DFU = 8,
    VT_USB_MAX_BULK_IN_QUEUE_LENGTH = 10
  };
  // May return nullptr when the field is absent.
  const flatbuffers::String *device() const {
    return GetPointer<const flatbuffers::String *>(VT_DEVICE);
  }
  tflite::CoralSettings_::Performance performance() const {
    return static_cast<tflite::CoralSettings_::Performance>(GetField<int32_t>(VT_PERFORMANCE, 0));
  }
  bool usb_always_dfu() const {
    // Stored as uint8_t; any non-zero byte reads as true.
    return GetField<uint8_t>(VT_USB_ALWAYS_DFU, 0) != 0;
  }
  int32_t usb_max_bulk_in_queue_length() const {
    return GetField<int32_t>(VT_USB_MAX_BULK_IN_QUEUE_LENGTH, 0);
  }
  // Structural validation, including the nested string field.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_DEVICE) &&
           verifier.VerifyString(device()) &&
           VerifyField<int32_t>(verifier, VT_PERFORMANCE, 4) &&
           VerifyField<uint8_t>(verifier, VT_USB_ALWAYS_DFU, 1) &&
           VerifyField<int32_t>(verifier, VT_USB_MAX_BULK_IN_QUEUE_LENGTH, 4) &&
           verifier.EndTable();
  }
  CoralSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<CoralSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1869 
// Incrementally writes a CoralSettings table into an in-progress buffer.
struct CoralSettingsBuilder {
  typedef CoralSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // Destination buffer (not owned).
  flatbuffers::uoffset_t start_;         // Offset where this table started.
  // The string must already have been serialized into the same builder.
  void add_device(flatbuffers::Offset<flatbuffers::String> device) {
    fbb_.AddOffset(CoralSettings::VT_DEVICE, device);
  }
  void add_performance(tflite::CoralSettings_::Performance performance) {
    fbb_.AddElement<int32_t>(CoralSettings::VT_PERFORMANCE, static_cast<int32_t>(performance), 0);
  }
  void add_usb_always_dfu(bool usb_always_dfu) {
    // Booleans are stored as uint8_t in FlatBuffers.
    fbb_.AddElement<uint8_t>(CoralSettings::VT_USB_ALWAYS_DFU, static_cast<uint8_t>(usb_always_dfu), 0);
  }
  void add_usb_max_bulk_in_queue_length(int32_t usb_max_bulk_in_queue_length) {
    fbb_.AddElement<int32_t>(CoralSettings::VT_USB_MAX_BULK_IN_QUEUE_LENGTH, usb_max_bulk_in_queue_length, 0);
  }
  explicit CoralSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and returns its offset within the buffer.
  flatbuffers::Offset<CoralSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<CoralSettings>(end);
    return o;
  }
};
1896 
// Convenience one-shot constructor; `device` must be an offset to a string
// already serialized into `_fbb` (0 means "field absent").
inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> device = 0,
    tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
    bool usb_always_dfu = false,
    int32_t usb_max_bulk_in_queue_length = 0) {
  CoralSettingsBuilder builder_(_fbb);
  builder_.add_usb_max_bulk_in_queue_length(usb_max_bulk_in_queue_length);
  builder_.add_performance(performance);
  builder_.add_device(device);
  builder_.add_usb_always_dfu(usb_always_dfu);
  return builder_.Finish();
}
1910 
// "Direct" variant: serializes the C-string `device` into the buffer first,
// then delegates to CreateCoralSettings.
inline flatbuffers::Offset<CoralSettings> CreateCoralSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *device = nullptr,
    tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
    bool usb_always_dfu = false,
    int32_t usb_max_bulk_in_queue_length = 0) {
  // nullptr maps to offset 0, i.e. the field is left out entirely.
  auto device__ = device ? _fbb.CreateString(device) : 0;
  return tflite::CreateCoralSettings(
      _fbb,
      device__,
      performance,
      usb_always_dfu,
      usb_max_bulk_in_queue_length);
}
1925 
// Serializes a native CoralSettingsT into a flatbuffer (defined out of line
// with the other generated Pack/UnPack implementations).
flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1927 
// Native (object API) mirror of the CPUSettings table, used by Pack/UnPack.
struct CPUSettingsT : public flatbuffers::NativeTable {
  typedef CPUSettings TableType;
  // Default -1 matches the field default returned by
  // CPUSettings::num_threads() when the field is absent from the buffer.
  int32_t num_threads = -1;
};
1932 
// Read-only accessor for a CPUSettings table inside a flatbuffer.
struct CPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef CPUSettingsT NativeTableType;
  typedef CPUSettingsBuilder Builder;
  // vtable slot for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUM_THREADS = 4
  };
  // Returns -1 (the schema default) when the field is not present.
  int32_t num_threads() const {
    return GetField<int32_t>(VT_NUM_THREADS, -1);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUM_THREADS, 4) &&
           verifier.EndTable();
  }
  CPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<CPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1951 
// Incremental builder for a CPUSettings table.
struct CPUSettingsBuilder {
  typedef CPUSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_num_threads(int32_t num_threads) {
    // The field is omitted from the buffer when equal to the default (-1).
    fbb_.AddElement<int32_t>(CPUSettings::VT_NUM_THREADS, num_threads, -1);
  }
  explicit CPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<CPUSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<CPUSettings>(end);
    return o;
  }
};
1969 
1970 inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(
1971     flatbuffers::FlatBufferBuilder &_fbb,
1972     int32_t num_threads = -1) {
1973   CPUSettingsBuilder builder_(_fbb);
1974   builder_.add_num_threads(num_threads);
1975   return builder_.Finish();
1976 }
1977 
// Serializes a native CPUSettingsT into a flatbuffer (defined out of line).
flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1979 
// Native (object API) mirror of the TFLiteSettings table.  Owns each nested
// per-delegate settings object via unique_ptr, which is why the copy/move
// special members are user-declared rather than defaulted inline.
struct TFLiteSettingsT : public flatbuffers::NativeTable {
  typedef TFLiteSettings TableType;
  tflite::Delegate delegate = tflite::Delegate_NONE;
  std::unique_ptr<tflite::NNAPISettingsT> nnapi_settings{};
  std::unique_ptr<tflite::GPUSettingsT> gpu_settings{};
  std::unique_ptr<tflite::HexagonSettingsT> hexagon_settings{};
  std::unique_ptr<tflite::XNNPackSettingsT> xnnpack_settings{};
  std::unique_ptr<tflite::CoreMLSettingsT> coreml_settings{};
  std::unique_ptr<tflite::CPUSettingsT> cpu_settings{};
  int32_t max_delegated_partitions = 0;
  std::unique_ptr<tflite::EdgeTpuSettingsT> edgetpu_settings{};
  std::unique_ptr<tflite::CoralSettingsT> coral_settings{};
  std::unique_ptr<tflite::FallbackSettingsT> fallback_settings{};
  bool disable_default_delegates = false;
  TFLiteSettingsT() = default;
  // Copy ctor and assignment are defined out of line — presumably they
  // deep-copy the owned settings objects (cannot be defaulted with
  // unique_ptr members).  Assignment takes its argument by value.
  TFLiteSettingsT(const TFLiteSettingsT &o);
  TFLiteSettingsT(TFLiteSettingsT&&) FLATBUFFERS_NOEXCEPT = default;
  TFLiteSettingsT &operator=(TFLiteSettingsT o) FLATBUFFERS_NOEXCEPT;
};
1999 
// Read-only accessor for a TFLiteSettings table inside a flatbuffer.  All
// pointer-returning accessors may return nullptr when the field is absent.
struct TFLiteSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef TFLiteSettingsT NativeTableType;
  typedef TFLiteSettingsBuilder Builder;
  // vtable slots for each field; ids increase by 2 per schema field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DELEGATE = 4,
    VT_NNAPI_SETTINGS = 6,
    VT_GPU_SETTINGS = 8,
    VT_HEXAGON_SETTINGS = 10,
    VT_XNNPACK_SETTINGS = 12,
    VT_COREML_SETTINGS = 14,
    VT_CPU_SETTINGS = 16,
    VT_MAX_DELEGATED_PARTITIONS = 18,
    VT_EDGETPU_SETTINGS = 20,
    VT_CORAL_SETTINGS = 22,
    VT_FALLBACK_SETTINGS = 24,
    VT_DISABLE_DEFAULT_DELEGATES = 26
  };
  // Which delegate these settings apply to (stored as int32 on the wire).
  tflite::Delegate delegate() const {
    return static_cast<tflite::Delegate>(GetField<int32_t>(VT_DELEGATE, 0));
  }
  const tflite::NNAPISettings *nnapi_settings() const {
    return GetPointer<const tflite::NNAPISettings *>(VT_NNAPI_SETTINGS);
  }
  const tflite::GPUSettings *gpu_settings() const {
    return GetPointer<const tflite::GPUSettings *>(VT_GPU_SETTINGS);
  }
  const tflite::HexagonSettings *hexagon_settings() const {
    return GetPointer<const tflite::HexagonSettings *>(VT_HEXAGON_SETTINGS);
  }
  const tflite::XNNPackSettings *xnnpack_settings() const {
    return GetPointer<const tflite::XNNPackSettings *>(VT_XNNPACK_SETTINGS);
  }
  const tflite::CoreMLSettings *coreml_settings() const {
    return GetPointer<const tflite::CoreMLSettings *>(VT_COREML_SETTINGS);
  }
  const tflite::CPUSettings *cpu_settings() const {
    return GetPointer<const tflite::CPUSettings *>(VT_CPU_SETTINGS);
  }
  int32_t max_delegated_partitions() const {
    return GetField<int32_t>(VT_MAX_DELEGATED_PARTITIONS, 0);
  }
  const tflite::EdgeTpuSettings *edgetpu_settings() const {
    return GetPointer<const tflite::EdgeTpuSettings *>(VT_EDGETPU_SETTINGS);
  }
  const tflite::CoralSettings *coral_settings() const {
    return GetPointer<const tflite::CoralSettings *>(VT_CORAL_SETTINGS);
  }
  const tflite::FallbackSettings *fallback_settings() const {
    return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
  }
  // Bools are stored as uint8 on the wire; absent means false.
  bool disable_default_delegates() const {
    return GetField<uint8_t>(VT_DISABLE_DEFAULT_DELEGATES, 0) != 0;
  }
  // Validates this table and, recursively, every nested settings table.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_DELEGATE, 4) &&
           VerifyOffset(verifier, VT_NNAPI_SETTINGS) &&
           verifier.VerifyTable(nnapi_settings()) &&
           VerifyOffset(verifier, VT_GPU_SETTINGS) &&
           verifier.VerifyTable(gpu_settings()) &&
           VerifyOffset(verifier, VT_HEXAGON_SETTINGS) &&
           verifier.VerifyTable(hexagon_settings()) &&
           VerifyOffset(verifier, VT_XNNPACK_SETTINGS) &&
           verifier.VerifyTable(xnnpack_settings()) &&
           VerifyOffset(verifier, VT_COREML_SETTINGS) &&
           verifier.VerifyTable(coreml_settings()) &&
           VerifyOffset(verifier, VT_CPU_SETTINGS) &&
           verifier.VerifyTable(cpu_settings()) &&
           VerifyField<int32_t>(verifier, VT_MAX_DELEGATED_PARTITIONS, 4) &&
           VerifyOffset(verifier, VT_EDGETPU_SETTINGS) &&
           verifier.VerifyTable(edgetpu_settings()) &&
           VerifyOffset(verifier, VT_CORAL_SETTINGS) &&
           verifier.VerifyTable(coral_settings()) &&
           VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
           verifier.VerifyTable(fallback_settings()) &&
           VerifyField<uint8_t>(verifier, VT_DISABLE_DEFAULT_DELEGATES, 1) &&
           verifier.EndTable();
  }
  TFLiteSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<TFLiteSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2082 
// Incremental builder for a TFLiteSettings table.  Nested tables must be
// serialized beforehand; only their offsets are stored here.
struct TFLiteSettingsBuilder {
  typedef TFLiteSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_delegate(tflite::Delegate delegate) {
    fbb_.AddElement<int32_t>(TFLiteSettings::VT_DELEGATE, static_cast<int32_t>(delegate), 0);
  }
  void add_nnapi_settings(flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_NNAPI_SETTINGS, nnapi_settings);
  }
  void add_gpu_settings(flatbuffers::Offset<tflite::GPUSettings> gpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_GPU_SETTINGS, gpu_settings);
  }
  void add_hexagon_settings(flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_HEXAGON_SETTINGS, hexagon_settings);
  }
  void add_xnnpack_settings(flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_XNNPACK_SETTINGS, xnnpack_settings);
  }
  void add_coreml_settings(flatbuffers::Offset<tflite::CoreMLSettings> coreml_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_COREML_SETTINGS, coreml_settings);
  }
  void add_cpu_settings(flatbuffers::Offset<tflite::CPUSettings> cpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_CPU_SETTINGS, cpu_settings);
  }
  void add_max_delegated_partitions(int32_t max_delegated_partitions) {
    fbb_.AddElement<int32_t>(TFLiteSettings::VT_MAX_DELEGATED_PARTITIONS, max_delegated_partitions, 0);
  }
  void add_edgetpu_settings(flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_EDGETPU_SETTINGS, edgetpu_settings);
  }
  void add_coral_settings(flatbuffers::Offset<tflite::CoralSettings> coral_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_CORAL_SETTINGS, coral_settings);
  }
  void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_FALLBACK_SETTINGS, fallback_settings);
  }
  void add_disable_default_delegates(bool disable_default_delegates) {
    // Bools are serialized as uint8; 0 is the default (field omitted).
    fbb_.AddElement<uint8_t>(TFLiteSettings::VT_DISABLE_DEFAULT_DELEGATES, static_cast<uint8_t>(disable_default_delegates), 0);
  }
  explicit TFLiteSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<TFLiteSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<TFLiteSettings>(end);
    return o;
  }
};
2133 
2134 inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(
2135     flatbuffers::FlatBufferBuilder &_fbb,
2136     tflite::Delegate delegate = tflite::Delegate_NONE,
2137     flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings = 0,
2138     flatbuffers::Offset<tflite::GPUSettings> gpu_settings = 0,
2139     flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings = 0,
2140     flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings = 0,
2141     flatbuffers::Offset<tflite::CoreMLSettings> coreml_settings = 0,
2142     flatbuffers::Offset<tflite::CPUSettings> cpu_settings = 0,
2143     int32_t max_delegated_partitions = 0,
2144     flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings = 0,
2145     flatbuffers::Offset<tflite::CoralSettings> coral_settings = 0,
2146     flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
2147     bool disable_default_delegates = false) {
2148   TFLiteSettingsBuilder builder_(_fbb);
2149   builder_.add_fallback_settings(fallback_settings);
2150   builder_.add_coral_settings(coral_settings);
2151   builder_.add_edgetpu_settings(edgetpu_settings);
2152   builder_.add_max_delegated_partitions(max_delegated_partitions);
2153   builder_.add_cpu_settings(cpu_settings);
2154   builder_.add_coreml_settings(coreml_settings);
2155   builder_.add_xnnpack_settings(xnnpack_settings);
2156   builder_.add_hexagon_settings(hexagon_settings);
2157   builder_.add_gpu_settings(gpu_settings);
2158   builder_.add_nnapi_settings(nnapi_settings);
2159   builder_.add_delegate(delegate);
2160   builder_.add_disable_default_delegates(disable_default_delegates);
2161   return builder_.Finish();
2162 }
2163 
// Serializes a native TFLiteSettingsT into a flatbuffer (defined out of line).
flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2165 
// Native (object API) mirror of the FallbackSettings table.
struct FallbackSettingsT : public flatbuffers::NativeTable {
  typedef FallbackSettings TableType;
  bool allow_automatic_fallback_on_compilation_error = false;
  bool allow_automatic_fallback_on_execution_error = false;
};
2171 
// Read-only accessor for a FallbackSettings table inside a flatbuffer.
struct FallbackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef FallbackSettingsT NativeTableType;
  typedef FallbackSettingsBuilder Builder;
  // vtable slots for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR = 4,
    VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR = 6
  };
  // Bools are stored as uint8 on the wire; absent means false.
  bool allow_automatic_fallback_on_compilation_error() const {
    return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, 0) != 0;
  }
  bool allow_automatic_fallback_on_execution_error() const {
    return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, 0) != 0;
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, 1) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, 1) &&
           verifier.EndTable();
  }
  FallbackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<FallbackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2195 
// Incremental builder for a FallbackSettings table.
struct FallbackSettingsBuilder {
  typedef FallbackSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_allow_automatic_fallback_on_compilation_error(bool allow_automatic_fallback_on_compilation_error) {
    fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_compilation_error), 0);
  }
  void add_allow_automatic_fallback_on_execution_error(bool allow_automatic_fallback_on_execution_error) {
    fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_execution_error), 0);
  }
  explicit FallbackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<FallbackSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<FallbackSettings>(end);
    return o;
  }
};
2216 
2217 inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(
2218     flatbuffers::FlatBufferBuilder &_fbb,
2219     bool allow_automatic_fallback_on_compilation_error = false,
2220     bool allow_automatic_fallback_on_execution_error = false) {
2221   FallbackSettingsBuilder builder_(_fbb);
2222   builder_.add_allow_automatic_fallback_on_execution_error(allow_automatic_fallback_on_execution_error);
2223   builder_.add_allow_automatic_fallback_on_compilation_error(allow_automatic_fallback_on_compilation_error);
2224   return builder_.Finish();
2225 }
2226 
// Serializes a native FallbackSettingsT into a flatbuffer (defined out of line).
flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2228 
// Native (object API) mirror of the BenchmarkMetric table.
struct BenchmarkMetricT : public flatbuffers::NativeTable {
  typedef BenchmarkMetric TableType;
  std::string name{};
  std::vector<float> values{};
};
2234 
// Read-only accessor for a BenchmarkMetric table inside a flatbuffer.
struct BenchmarkMetric FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkMetricT NativeTableType;
  typedef BenchmarkMetricBuilder Builder;
  // vtable slots for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NAME = 4,
    VT_VALUES = 6
  };
  // May return nullptr when the field is absent from the buffer.
  const flatbuffers::String *name() const {
    return GetPointer<const flatbuffers::String *>(VT_NAME);
  }
  const flatbuffers::Vector<float> *values() const {
    return GetPointer<const flatbuffers::Vector<float> *>(VT_VALUES);
  }
  // Validates the table, its string field, and its vector field.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_NAME) &&
           verifier.VerifyString(name()) &&
           VerifyOffset(verifier, VT_VALUES) &&
           verifier.VerifyVector(values()) &&
           verifier.EndTable();
  }
  BenchmarkMetricT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkMetric> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2260 
// Incremental builder for a BenchmarkMetric table.  The string/vector
// children must already be serialized; only offsets are stored here.
struct BenchmarkMetricBuilder {
  typedef BenchmarkMetric Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_name(flatbuffers::Offset<flatbuffers::String> name) {
    fbb_.AddOffset(BenchmarkMetric::VT_NAME, name);
  }
  void add_values(flatbuffers::Offset<flatbuffers::Vector<float>> values) {
    fbb_.AddOffset(BenchmarkMetric::VT_VALUES, values);
  }
  explicit BenchmarkMetricBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkMetric> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkMetric>(end);
    return o;
  }
};
2281 
2282 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(
2283     flatbuffers::FlatBufferBuilder &_fbb,
2284     flatbuffers::Offset<flatbuffers::String> name = 0,
2285     flatbuffers::Offset<flatbuffers::Vector<float>> values = 0) {
2286   BenchmarkMetricBuilder builder_(_fbb);
2287   builder_.add_values(values);
2288   builder_.add_name(name);
2289   return builder_.Finish();
2290 }
2291 
2292 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetricDirect(
2293     flatbuffers::FlatBufferBuilder &_fbb,
2294     const char *name = nullptr,
2295     const std::vector<float> *values = nullptr) {
2296   auto name__ = name ? _fbb.CreateString(name) : 0;
2297   auto values__ = values ? _fbb.CreateVector<float>(*values) : 0;
2298   return tflite::CreateBenchmarkMetric(
2299       _fbb,
2300       name__,
2301       values__);
2302 }
2303 
// Serializes a native BenchmarkMetricT into a flatbuffer (defined out of line).
flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2305 
// Native (object API) mirror of the BenchmarkResult table.  Owns its
// metrics via unique_ptr, hence the user-declared copy/move members.
struct BenchmarkResultT : public flatbuffers::NativeTable {
  typedef BenchmarkResult TableType;
  std::vector<int64_t> initialization_time_us{};
  std::vector<int64_t> inference_time_us{};
  int32_t max_memory_kb = 0;
  bool ok = false;
  std::vector<std::unique_ptr<tflite::BenchmarkMetricT>> metrics{};
  BenchmarkResultT() = default;
  // Copy ctor/assignment are defined out of line — presumably deep-copying
  // the owned metrics (cannot be defaulted with unique_ptr elements).
  BenchmarkResultT(const BenchmarkResultT &o);
  BenchmarkResultT(BenchmarkResultT&&) FLATBUFFERS_NOEXCEPT = default;
  BenchmarkResultT &operator=(BenchmarkResultT o) FLATBUFFERS_NOEXCEPT;
};
2318 
// Read-only accessor for a BenchmarkResult table inside a flatbuffer.
struct BenchmarkResult FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkResultT NativeTableType;
  typedef BenchmarkResultBuilder Builder;
  // vtable slots for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INITIALIZATION_TIME_US = 4,
    VT_INFERENCE_TIME_US = 6,
    VT_MAX_MEMORY_KB = 8,
    VT_OK = 10,
    VT_METRICS = 12
  };
  // Vector accessors may return nullptr when the field is absent.
  const flatbuffers::Vector<int64_t> *initialization_time_us() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INITIALIZATION_TIME_US);
  }
  const flatbuffers::Vector<int64_t> *inference_time_us() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INFERENCE_TIME_US);
  }
  int32_t max_memory_kb() const {
    return GetField<int32_t>(VT_MAX_MEMORY_KB, 0);
  }
  // Stored as uint8 on the wire; absent means false.
  bool ok() const {
    return GetField<uint8_t>(VT_OK, 0) != 0;
  }
  const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *>(VT_METRICS);
  }
  // Validates the table, its scalar fields, and each nested metric table.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_INITIALIZATION_TIME_US) &&
           verifier.VerifyVector(initialization_time_us()) &&
           VerifyOffset(verifier, VT_INFERENCE_TIME_US) &&
           verifier.VerifyVector(inference_time_us()) &&
           VerifyField<int32_t>(verifier, VT_MAX_MEMORY_KB, 4) &&
           VerifyField<uint8_t>(verifier, VT_OK, 1) &&
           VerifyOffset(verifier, VT_METRICS) &&
           verifier.VerifyVector(metrics()) &&
           verifier.VerifyVectorOfTables(metrics()) &&
           verifier.EndTable();
  }
  BenchmarkResultT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkResult> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2361 
// Incremental builder for a BenchmarkResult table.
struct BenchmarkResultBuilder {
  typedef BenchmarkResult Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_initialization_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us) {
    fbb_.AddOffset(BenchmarkResult::VT_INITIALIZATION_TIME_US, initialization_time_us);
  }
  void add_inference_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us) {
    fbb_.AddOffset(BenchmarkResult::VT_INFERENCE_TIME_US, inference_time_us);
  }
  void add_max_memory_kb(int32_t max_memory_kb) {
    fbb_.AddElement<int32_t>(BenchmarkResult::VT_MAX_MEMORY_KB, max_memory_kb, 0);
  }
  void add_ok(bool ok) {
    fbb_.AddElement<uint8_t>(BenchmarkResult::VT_OK, static_cast<uint8_t>(ok), 0);
  }
  void add_metrics(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics) {
    fbb_.AddOffset(BenchmarkResult::VT_METRICS, metrics);
  }
  explicit BenchmarkResultBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkResult> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkResult>(end);
    return o;
  }
};
2391 
2392 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(
2393     flatbuffers::FlatBufferBuilder &_fbb,
2394     flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us = 0,
2395     flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us = 0,
2396     int32_t max_memory_kb = 0,
2397     bool ok = false,
2398     flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics = 0) {
2399   BenchmarkResultBuilder builder_(_fbb);
2400   builder_.add_metrics(metrics);
2401   builder_.add_max_memory_kb(max_memory_kb);
2402   builder_.add_inference_time_us(inference_time_us);
2403   builder_.add_initialization_time_us(initialization_time_us);
2404   builder_.add_ok(ok);
2405   return builder_.Finish();
2406 }
2407 
2408 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResultDirect(
2409     flatbuffers::FlatBufferBuilder &_fbb,
2410     const std::vector<int64_t> *initialization_time_us = nullptr,
2411     const std::vector<int64_t> *inference_time_us = nullptr,
2412     int32_t max_memory_kb = 0,
2413     bool ok = false,
2414     const std::vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics = nullptr) {
2415   auto initialization_time_us__ = initialization_time_us ? _fbb.CreateVector<int64_t>(*initialization_time_us) : 0;
2416   auto inference_time_us__ = inference_time_us ? _fbb.CreateVector<int64_t>(*inference_time_us) : 0;
2417   auto metrics__ = metrics ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>>(*metrics) : 0;
2418   return tflite::CreateBenchmarkResult(
2419       _fbb,
2420       initialization_time_us__,
2421       inference_time_us__,
2422       max_memory_kb,
2423       ok,
2424       metrics__);
2425 }
2426 
// Serializes a native BenchmarkResultT into a flatbuffer (defined out of line).
flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2428 
// Native (object API) mirror of the ErrorCode table.
struct ErrorCodeT : public flatbuffers::NativeTable {
  typedef ErrorCode TableType;
  // Delegate that produced the error.
  tflite::Delegate source = tflite::Delegate_NONE;
  int32_t tflite_error = 0;
  int64_t underlying_api_error = 0;
};
2435 
// Read-only accessor for an ErrorCode table inside a flatbuffer.
struct ErrorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef ErrorCodeT NativeTableType;
  typedef ErrorCodeBuilder Builder;
  // vtable slots for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_SOURCE = 4,
    VT_TFLITE_ERROR = 6,
    VT_UNDERLYING_API_ERROR = 8
  };
  // Delegate that produced the error (stored as int32 on the wire).
  tflite::Delegate source() const {
    return static_cast<tflite::Delegate>(GetField<int32_t>(VT_SOURCE, 0));
  }
  int32_t tflite_error() const {
    return GetField<int32_t>(VT_TFLITE_ERROR, 0);
  }
  int64_t underlying_api_error() const {
    return GetField<int64_t>(VT_UNDERLYING_API_ERROR, 0);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_SOURCE, 4) &&
           VerifyField<int32_t>(verifier, VT_TFLITE_ERROR, 4) &&
           VerifyField<int64_t>(verifier, VT_UNDERLYING_API_ERROR, 8) &&
           verifier.EndTable();
  }
  ErrorCodeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<ErrorCode> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2464 
// Incremental builder for an ErrorCode table.
struct ErrorCodeBuilder {
  typedef ErrorCode Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_source(tflite::Delegate source) {
    fbb_.AddElement<int32_t>(ErrorCode::VT_SOURCE, static_cast<int32_t>(source), 0);
  }
  void add_tflite_error(int32_t tflite_error) {
    fbb_.AddElement<int32_t>(ErrorCode::VT_TFLITE_ERROR, tflite_error, 0);
  }
  void add_underlying_api_error(int64_t underlying_api_error) {
    fbb_.AddElement<int64_t>(ErrorCode::VT_UNDERLYING_API_ERROR, underlying_api_error, 0);
  }
  explicit ErrorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<ErrorCode> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<ErrorCode>(end);
    return o;
  }
};
2488 
2489 inline flatbuffers::Offset<ErrorCode> CreateErrorCode(
2490     flatbuffers::FlatBufferBuilder &_fbb,
2491     tflite::Delegate source = tflite::Delegate_NONE,
2492     int32_t tflite_error = 0,
2493     int64_t underlying_api_error = 0) {
2494   ErrorCodeBuilder builder_(_fbb);
2495   builder_.add_underlying_api_error(underlying_api_error);
2496   builder_.add_tflite_error(tflite_error);
2497   builder_.add_source(source);
2498   return builder_.Finish();
2499 }
2500 
// Serializes a native ErrorCodeT into a flatbuffer (defined out of line).
flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2502 
// Native (object API) mirror of the BenchmarkError table.  Owns its
// error codes via unique_ptr, hence the user-declared copy/move members.
struct BenchmarkErrorT : public flatbuffers::NativeTable {
  typedef BenchmarkError TableType;
  tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN;
  int32_t exit_code = 0;
  int32_t signal = 0;
  std::vector<std::unique_ptr<tflite::ErrorCodeT>> error_code{};
  int32_t mini_benchmark_error_code = 0;
  BenchmarkErrorT() = default;
  // Copy ctor/assignment are defined out of line — presumably deep-copying
  // the owned error codes (cannot be defaulted with unique_ptr elements).
  BenchmarkErrorT(const BenchmarkErrorT &o);
  BenchmarkErrorT(BenchmarkErrorT&&) FLATBUFFERS_NOEXCEPT = default;
  BenchmarkErrorT &operator=(BenchmarkErrorT o) FLATBUFFERS_NOEXCEPT;
};
2515 
// Flat (in-buffer) accessor for the BenchmarkError table. All getters read
// directly from the underlying buffer; pointer-valued getters return null
// when the optional field is absent.
struct BenchmarkError FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkErrorT NativeTableType;
  typedef BenchmarkErrorBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_STAGE = 4,
    VT_EXIT_CODE = 6,
    VT_SIGNAL = 8,
    VT_ERROR_CODE = 10,
    VT_MINI_BENCHMARK_ERROR_CODE = 12
  };
  tflite::BenchmarkStage stage() const {
    return static_cast<tflite::BenchmarkStage>(GetField<int32_t>(VT_STAGE, 0));
  }
  int32_t exit_code() const {
    return GetField<int32_t>(VT_EXIT_CODE, 0);
  }
  int32_t signal() const {
    return GetField<int32_t>(VT_SIGNAL, 0);
  }
  // Vector of nested ErrorCode tables; null if unset.
  const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *>(VT_ERROR_CODE);
  }
  int32_t mini_benchmark_error_code() const {
    return GetField<int32_t>(VT_MINI_BENCHMARK_ERROR_CODE, 0);
  }
  // Structural validation of an untrusted buffer: checks field alignment/size
  // and recursively verifies the error_code vector of tables.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_STAGE, 4) &&
           VerifyField<int32_t>(verifier, VT_EXIT_CODE, 4) &&
           VerifyField<int32_t>(verifier, VT_SIGNAL, 4) &&
           VerifyOffset(verifier, VT_ERROR_CODE) &&
           verifier.VerifyVector(error_code()) &&
           verifier.VerifyVectorOfTables(error_code()) &&
           VerifyField<int32_t>(verifier, VT_MINI_BENCHMARK_ERROR_CODE, 4) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> BenchmarkErrorT and back.
  BenchmarkErrorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkError> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2556 
// Low-level field-by-field builder for BenchmarkError. Each add_* writes one
// field into the table started by the constructor; Finish() closes the table
// and returns its offset.
struct BenchmarkErrorBuilder {
  typedef BenchmarkError Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_stage(tflite::BenchmarkStage stage) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_STAGE, static_cast<int32_t>(stage), 0);
  }
  void add_exit_code(int32_t exit_code) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_EXIT_CODE, exit_code, 0);
  }
  void add_signal(int32_t signal) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_SIGNAL, signal, 0);
  }
  void add_error_code(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code) {
    fbb_.AddOffset(BenchmarkError::VT_ERROR_CODE, error_code);
  }
  void add_mini_benchmark_error_code(int32_t mini_benchmark_error_code) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_MINI_BENCHMARK_ERROR_CODE, mini_benchmark_error_code, 0);
  }
  explicit BenchmarkErrorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<BenchmarkError> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkError>(end);
    return o;
  }
};
2586 
// Convenience builder for a BenchmarkError table; `error_code` must be a
// vector offset already serialized into the same `_fbb`.
inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
    int32_t exit_code = 0,
    int32_t signal = 0,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code = 0,
    int32_t mini_benchmark_error_code = 0) {
  BenchmarkErrorBuilder builder_(_fbb);
  // Fields are added in reverse declaration order (all same-size here).
  builder_.add_mini_benchmark_error_code(mini_benchmark_error_code);
  builder_.add_error_code(error_code);
  builder_.add_signal(signal);
  builder_.add_exit_code(exit_code);
  builder_.add_stage(stage);
  return builder_.Finish();
}
2602 
// "Direct" variant: accepts a std::vector of already-serialized ErrorCode
// offsets, serializes the vector itself, then forwards to
// CreateBenchmarkError. A null `error_code` produces an absent field.
inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkErrorDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
    int32_t exit_code = 0,
    int32_t signal = 0,
    const std::vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code = nullptr,
    int32_t mini_benchmark_error_code = 0) {
  auto error_code__ = error_code ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>>(*error_code) : 0;
  return tflite::CreateBenchmarkError(
      _fbb,
      stage,
      exit_code,
      signal,
      error_code__,
      mini_benchmark_error_code);
}
2619 
2620 flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2621 
// Native (object-API) mirror of the BenchmarkEvent table. Nested tables are
// owned via unique_ptr, hence the user-declared copy operations below.
struct BenchmarkEventT : public flatbuffers::NativeTable {
  typedef BenchmarkEvent TableType;
  std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings{};
  tflite::BenchmarkEventType event_type = tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE;
  std::unique_ptr<tflite::BenchmarkResultT> result{};
  std::unique_ptr<tflite::BenchmarkErrorT> error{};
  int64_t boottime_us = 0;
  int64_t wallclock_us = 0;
  BenchmarkEventT() = default;
  // Deep copy (implementation generated elsewhere in the .cc file).
  BenchmarkEventT(const BenchmarkEventT &o);
  BenchmarkEventT(BenchmarkEventT&&) FLATBUFFERS_NOEXCEPT = default;
  // By-value parameter: assignment is implemented via copy/move-and-swap.
  BenchmarkEventT &operator=(BenchmarkEventT o) FLATBUFFERS_NOEXCEPT;
};
2635 
// Flat (in-buffer) accessor for the BenchmarkEvent table. Pointer-valued
// getters return null when the optional nested table is absent.
struct BenchmarkEvent FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkEventT NativeTableType;
  typedef BenchmarkEventBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_TFLITE_SETTINGS = 4,
    VT_EVENT_TYPE = 6,
    VT_RESULT = 8,
    VT_ERROR = 10,
    VT_BOOTTIME_US = 12,
    VT_WALLCLOCK_US = 14
  };
  const tflite::TFLiteSettings *tflite_settings() const {
    return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
  }
  tflite::BenchmarkEventType event_type() const {
    return static_cast<tflite::BenchmarkEventType>(GetField<int32_t>(VT_EVENT_TYPE, 0));
  }
  const tflite::BenchmarkResult *result() const {
    return GetPointer<const tflite::BenchmarkResult *>(VT_RESULT);
  }
  const tflite::BenchmarkError *error() const {
    return GetPointer<const tflite::BenchmarkError *>(VT_ERROR);
  }
  int64_t boottime_us() const {
    return GetField<int64_t>(VT_BOOTTIME_US, 0);
  }
  int64_t wallclock_us() const {
    return GetField<int64_t>(VT_WALLCLOCK_US, 0);
  }
  // Structural validation of an untrusted buffer, including the three
  // optional nested tables.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
           verifier.VerifyTable(tflite_settings()) &&
           VerifyField<int32_t>(verifier, VT_EVENT_TYPE, 4) &&
           VerifyOffset(verifier, VT_RESULT) &&
           verifier.VerifyTable(result()) &&
           VerifyOffset(verifier, VT_ERROR) &&
           verifier.VerifyTable(error()) &&
           VerifyField<int64_t>(verifier, VT_BOOTTIME_US, 8) &&
           VerifyField<int64_t>(verifier, VT_WALLCLOCK_US, 8) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> BenchmarkEventT and back.
  BenchmarkEventT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkEvent> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2682 
// Low-level field-by-field builder for BenchmarkEvent.
struct BenchmarkEventBuilder {
  typedef BenchmarkEvent Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
    fbb_.AddOffset(BenchmarkEvent::VT_TFLITE_SETTINGS, tflite_settings);
  }
  void add_event_type(tflite::BenchmarkEventType event_type) {
    fbb_.AddElement<int32_t>(BenchmarkEvent::VT_EVENT_TYPE, static_cast<int32_t>(event_type), 0);
  }
  void add_result(flatbuffers::Offset<tflite::BenchmarkResult> result) {
    fbb_.AddOffset(BenchmarkEvent::VT_RESULT, result);
  }
  void add_error(flatbuffers::Offset<tflite::BenchmarkError> error) {
    fbb_.AddOffset(BenchmarkEvent::VT_ERROR, error);
  }
  void add_boottime_us(int64_t boottime_us) {
    fbb_.AddElement<int64_t>(BenchmarkEvent::VT_BOOTTIME_US, boottime_us, 0);
  }
  void add_wallclock_us(int64_t wallclock_us) {
    fbb_.AddElement<int64_t>(BenchmarkEvent::VT_WALLCLOCK_US, wallclock_us, 0);
  }
  explicit BenchmarkEventBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<BenchmarkEvent> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkEvent>(end);
    return o;
  }
};
2715 
// Convenience builder for a BenchmarkEvent table; nested-table arguments must
// already be serialized into the same `_fbb`.
inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
    tflite::BenchmarkEventType event_type = tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
    flatbuffers::Offset<tflite::BenchmarkResult> result = 0,
    flatbuffers::Offset<tflite::BenchmarkError> error = 0,
    int64_t boottime_us = 0,
    int64_t wallclock_us = 0) {
  BenchmarkEventBuilder builder_(_fbb);
  // 64-bit fields first (descending field size), then the 32-bit fields in
  // reverse declaration order.
  builder_.add_wallclock_us(wallclock_us);
  builder_.add_boottime_us(boottime_us);
  builder_.add_error(error);
  builder_.add_result(result);
  builder_.add_event_type(event_type);
  builder_.add_tflite_settings(tflite_settings);
  return builder_.Finish();
}
2733 
2734 flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2735 
// Native (object-API) mirror of the BestAccelerationDecision table. The
// nested BenchmarkEventT is owned via unique_ptr, hence the user-declared
// copy operations.
struct BestAccelerationDecisionT : public flatbuffers::NativeTable {
  typedef BestAccelerationDecision TableType;
  int32_t number_of_source_events = 0;
  std::unique_ptr<tflite::BenchmarkEventT> min_latency_event{};
  int64_t min_inference_time_us = 0;
  BestAccelerationDecisionT() = default;
  // Deep copy (implementation generated elsewhere in the .cc file).
  BestAccelerationDecisionT(const BestAccelerationDecisionT &o);
  BestAccelerationDecisionT(BestAccelerationDecisionT&&) FLATBUFFERS_NOEXCEPT = default;
  // By-value parameter: assignment is implemented via copy/move-and-swap.
  BestAccelerationDecisionT &operator=(BestAccelerationDecisionT o) FLATBUFFERS_NOEXCEPT;
};
2746 
// Flat (in-buffer) accessor for the BestAccelerationDecision table.
struct BestAccelerationDecision FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BestAccelerationDecisionT NativeTableType;
  typedef BestAccelerationDecisionBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUMBER_OF_SOURCE_EVENTS = 4,
    VT_MIN_LATENCY_EVENT = 6,
    VT_MIN_INFERENCE_TIME_US = 8
  };
  int32_t number_of_source_events() const {
    return GetField<int32_t>(VT_NUMBER_OF_SOURCE_EVENTS, 0);
  }
  // Nested BenchmarkEvent table; null if unset.
  const tflite::BenchmarkEvent *min_latency_event() const {
    return GetPointer<const tflite::BenchmarkEvent *>(VT_MIN_LATENCY_EVENT);
  }
  int64_t min_inference_time_us() const {
    return GetField<int64_t>(VT_MIN_INFERENCE_TIME_US, 0);
  }
  // Structural validation of an untrusted buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUMBER_OF_SOURCE_EVENTS, 4) &&
           VerifyOffset(verifier, VT_MIN_LATENCY_EVENT) &&
           verifier.VerifyTable(min_latency_event()) &&
           VerifyField<int64_t>(verifier, VT_MIN_INFERENCE_TIME_US, 8) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> BestAccelerationDecisionT and back.
  BestAccelerationDecisionT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BestAccelerationDecisionT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BestAccelerationDecision> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2776 
// Low-level field-by-field builder for BestAccelerationDecision.
struct BestAccelerationDecisionBuilder {
  typedef BestAccelerationDecision Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_number_of_source_events(int32_t number_of_source_events) {
    fbb_.AddElement<int32_t>(BestAccelerationDecision::VT_NUMBER_OF_SOURCE_EVENTS, number_of_source_events, 0);
  }
  void add_min_latency_event(flatbuffers::Offset<tflite::BenchmarkEvent> min_latency_event) {
    fbb_.AddOffset(BestAccelerationDecision::VT_MIN_LATENCY_EVENT, min_latency_event);
  }
  void add_min_inference_time_us(int64_t min_inference_time_us) {
    fbb_.AddElement<int64_t>(BestAccelerationDecision::VT_MIN_INFERENCE_TIME_US, min_inference_time_us, 0);
  }
  explicit BestAccelerationDecisionBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<BestAccelerationDecision> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BestAccelerationDecision>(end);
    return o;
  }
};
2800 
// Convenience builder for a BestAccelerationDecision table; the 64-bit field
// is added first (descending field size).
inline flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t number_of_source_events = 0,
    flatbuffers::Offset<tflite::BenchmarkEvent> min_latency_event = 0,
    int64_t min_inference_time_us = 0) {
  BestAccelerationDecisionBuilder builder_(_fbb);
  builder_.add_min_inference_time_us(min_inference_time_us);
  builder_.add_min_latency_event(min_latency_event);
  builder_.add_number_of_source_events(number_of_source_events);
  return builder_.Finish();
}
2812 
2813 flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2814 
// Native (object-API) mirror of BenchmarkInitializationFailure. All members
// are trivially copyable, so the defaulted special members suffice.
struct BenchmarkInitializationFailureT : public flatbuffers::NativeTable {
  typedef BenchmarkInitializationFailure TableType;
  int32_t initialization_status = 0;
};
2819 
// Flat (in-buffer) accessor for the BenchmarkInitializationFailure table.
struct BenchmarkInitializationFailure FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkInitializationFailureT NativeTableType;
  typedef BenchmarkInitializationFailureBuilder Builder;
  // vtable byte offset of the single field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INITIALIZATION_STATUS = 4
  };
  int32_t initialization_status() const {
    return GetField<int32_t>(VT_INITIALIZATION_STATUS, 0);
  }
  // Structural validation of an untrusted buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INITIALIZATION_STATUS, 4) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> BenchmarkInitializationFailureT and back.
  BenchmarkInitializationFailureT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkInitializationFailureT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkInitializationFailure> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2838 
// Low-level field-by-field builder for BenchmarkInitializationFailure.
struct BenchmarkInitializationFailureBuilder {
  typedef BenchmarkInitializationFailure Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_initialization_status(int32_t initialization_status) {
    fbb_.AddElement<int32_t>(BenchmarkInitializationFailure::VT_INITIALIZATION_STATUS, initialization_status, 0);
  }
  explicit BenchmarkInitializationFailureBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<BenchmarkInitializationFailure> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkInitializationFailure>(end);
    return o;
  }
};
2856 
// Convenience builder for a BenchmarkInitializationFailure table.
inline flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t initialization_status = 0) {
  BenchmarkInitializationFailureBuilder builder_(_fbb);
  builder_.add_initialization_status(initialization_status);
  return builder_.Finish();
}
2864 
2865 flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2866 
// Native (object-API) mirror of the MiniBenchmarkEvent table. The three
// nested tables are owned via unique_ptr, hence the user-declared copy
// operations below.
struct MiniBenchmarkEventT : public flatbuffers::NativeTable {
  typedef MiniBenchmarkEvent TableType;
  bool is_log_flushing_event = false;
  std::unique_ptr<tflite::BestAccelerationDecisionT> best_acceleration_decision{};
  std::unique_ptr<tflite::BenchmarkInitializationFailureT> initialization_failure{};
  std::unique_ptr<tflite::BenchmarkEventT> benchmark_event{};
  MiniBenchmarkEventT() = default;
  // Deep copy (implementation generated elsewhere in the .cc file).
  MiniBenchmarkEventT(const MiniBenchmarkEventT &o);
  MiniBenchmarkEventT(MiniBenchmarkEventT&&) FLATBUFFERS_NOEXCEPT = default;
  // By-value parameter: assignment is implemented via copy/move-and-swap.
  MiniBenchmarkEventT &operator=(MiniBenchmarkEventT o) FLATBUFFERS_NOEXCEPT;
};
2878 
// Flat (in-buffer) accessor for the MiniBenchmarkEvent table.
struct MiniBenchmarkEvent FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef MiniBenchmarkEventT NativeTableType;
  typedef MiniBenchmarkEventBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_IS_LOG_FLUSHING_EVENT = 4,
    VT_BEST_ACCELERATION_DECISION = 6,
    VT_INITIALIZATION_FAILURE = 8,
    VT_BENCHMARK_EVENT = 10
  };
  // Booleans are stored as uint8_t in the buffer.
  bool is_log_flushing_event() const {
    return GetField<uint8_t>(VT_IS_LOG_FLUSHING_EVENT, 0) != 0;
  }
  const tflite::BestAccelerationDecision *best_acceleration_decision() const {
    return GetPointer<const tflite::BestAccelerationDecision *>(VT_BEST_ACCELERATION_DECISION);
  }
  const tflite::BenchmarkInitializationFailure *initialization_failure() const {
    return GetPointer<const tflite::BenchmarkInitializationFailure *>(VT_INITIALIZATION_FAILURE);
  }
  const tflite::BenchmarkEvent *benchmark_event() const {
    return GetPointer<const tflite::BenchmarkEvent *>(VT_BENCHMARK_EVENT);
  }
  // Structural validation of an untrusted buffer, including the three
  // optional nested tables.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_IS_LOG_FLUSHING_EVENT, 1) &&
           VerifyOffset(verifier, VT_BEST_ACCELERATION_DECISION) &&
           verifier.VerifyTable(best_acceleration_decision()) &&
           VerifyOffset(verifier, VT_INITIALIZATION_FAILURE) &&
           verifier.VerifyTable(initialization_failure()) &&
           VerifyOffset(verifier, VT_BENCHMARK_EVENT) &&
           verifier.VerifyTable(benchmark_event()) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> MiniBenchmarkEventT and back.
  MiniBenchmarkEventT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(MiniBenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<MiniBenchmarkEvent> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2915 
// Low-level field-by-field builder for MiniBenchmarkEvent.
struct MiniBenchmarkEventBuilder {
  typedef MiniBenchmarkEvent Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_is_log_flushing_event(bool is_log_flushing_event) {
    fbb_.AddElement<uint8_t>(MiniBenchmarkEvent::VT_IS_LOG_FLUSHING_EVENT, static_cast<uint8_t>(is_log_flushing_event), 0);
  }
  void add_best_acceleration_decision(flatbuffers::Offset<tflite::BestAccelerationDecision> best_acceleration_decision) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_BEST_ACCELERATION_DECISION, best_acceleration_decision);
  }
  void add_initialization_failure(flatbuffers::Offset<tflite::BenchmarkInitializationFailure> initialization_failure) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_INITIALIZATION_FAILURE, initialization_failure);
  }
  void add_benchmark_event(flatbuffers::Offset<tflite::BenchmarkEvent> benchmark_event) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_BENCHMARK_EVENT, benchmark_event);
  }
  explicit MiniBenchmarkEventBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<MiniBenchmarkEvent> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<MiniBenchmarkEvent>(end);
    return o;
  }
};
2942 
// Convenience builder for a MiniBenchmarkEvent table; offsets are added in
// reverse declaration order, with the 1-byte bool last.
inline flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(
    flatbuffers::FlatBufferBuilder &_fbb,
    bool is_log_flushing_event = false,
    flatbuffers::Offset<tflite::BestAccelerationDecision> best_acceleration_decision = 0,
    flatbuffers::Offset<tflite::BenchmarkInitializationFailure> initialization_failure = 0,
    flatbuffers::Offset<tflite::BenchmarkEvent> benchmark_event = 0) {
  MiniBenchmarkEventBuilder builder_(_fbb);
  builder_.add_benchmark_event(benchmark_event);
  builder_.add_initialization_failure(initialization_failure);
  builder_.add_best_acceleration_decision(best_acceleration_decision);
  builder_.add_is_log_flushing_event(is_log_flushing_event);
  return builder_.Finish();
}
2956 
2957 flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2958 
// Native (object-API) mirror of the ModelFile table. All members are
// value types, so the defaulted special members suffice.
struct ModelFileT : public flatbuffers::NativeTable {
  typedef ModelFile TableType;
  std::string filename{};
  int64_t fd = 0;
  int64_t offset = 0;
  int64_t length = 0;
};
2966 
// Flat (in-buffer) accessor for the ModelFile table.
struct ModelFile FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef ModelFileT NativeTableType;
  typedef ModelFileBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_FILENAME = 4,
    VT_FD = 6,
    VT_OFFSET = 8,
    VT_LENGTH = 10
  };
  // Model path; null if unset.
  const flatbuffers::String *filename() const {
    return GetPointer<const flatbuffers::String *>(VT_FILENAME);
  }
  int64_t fd() const {
    return GetField<int64_t>(VT_FD, 0);
  }
  int64_t offset() const {
    return GetField<int64_t>(VT_OFFSET, 0);
  }
  int64_t length() const {
    return GetField<int64_t>(VT_LENGTH, 0);
  }
  // Structural validation of an untrusted buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_FILENAME) &&
           verifier.VerifyString(filename()) &&
           VerifyField<int64_t>(verifier, VT_FD, 8) &&
           VerifyField<int64_t>(verifier, VT_OFFSET, 8) &&
           VerifyField<int64_t>(verifier, VT_LENGTH, 8) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> ModelFileT and back.
  ModelFileT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(ModelFileT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<ModelFile> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
3001 
// Low-level field-by-field builder for ModelFile.
struct ModelFileBuilder {
  typedef ModelFile Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_filename(flatbuffers::Offset<flatbuffers::String> filename) {
    fbb_.AddOffset(ModelFile::VT_FILENAME, filename);
  }
  void add_fd(int64_t fd) {
    fbb_.AddElement<int64_t>(ModelFile::VT_FD, fd, 0);
  }
  void add_offset(int64_t offset) {
    fbb_.AddElement<int64_t>(ModelFile::VT_OFFSET, offset, 0);
  }
  void add_length(int64_t length) {
    fbb_.AddElement<int64_t>(ModelFile::VT_LENGTH, length, 0);
  }
  explicit ModelFileBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<ModelFile> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<ModelFile>(end);
    return o;
  }
};
3028 
// Convenience builder for a ModelFile table; the `filename` string must
// already be serialized into the same `_fbb`.
inline flatbuffers::Offset<ModelFile> CreateModelFile(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> filename = 0,
    int64_t fd = 0,
    int64_t offset = 0,
    int64_t length = 0) {
  ModelFileBuilder builder_(_fbb);
  // 64-bit fields first (descending field size), reverse declaration order.
  builder_.add_length(length);
  builder_.add_offset(offset);
  builder_.add_fd(fd);
  builder_.add_filename(filename);
  return builder_.Finish();
}
3042 
// "Direct" variant: serializes the C-string `filename` itself, then forwards
// to CreateModelFile. A null `filename` produces an absent field.
inline flatbuffers::Offset<ModelFile> CreateModelFileDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *filename = nullptr,
    int64_t fd = 0,
    int64_t offset = 0,
    int64_t length = 0) {
  auto filename__ = filename ? _fbb.CreateString(filename) : 0;
  return tflite::CreateModelFile(
      _fbb,
      filename__,
      fd,
      offset,
      length);
}
3057 
3058 flatbuffers::Offset<ModelFile> CreateModelFile(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
3059 
// Native (object-API) mirror of the BenchmarkStoragePaths table. All members
// are value types, so the defaulted special members suffice.
struct BenchmarkStoragePathsT : public flatbuffers::NativeTable {
  typedef BenchmarkStoragePaths TableType;
  std::string storage_file_path{};
  std::string data_directory_path{};
};
3065 
// Flat (in-buffer) accessor for the BenchmarkStoragePaths table.
struct BenchmarkStoragePaths FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkStoragePathsT NativeTableType;
  typedef BenchmarkStoragePathsBuilder Builder;
  // vtable byte offsets of each field (4 = first field slot).
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_STORAGE_FILE_PATH = 4,
    VT_DATA_DIRECTORY_PATH = 6
  };
  // Null if unset.
  const flatbuffers::String *storage_file_path() const {
    return GetPointer<const flatbuffers::String *>(VT_STORAGE_FILE_PATH);
  }
  // Null if unset.
  const flatbuffers::String *data_directory_path() const {
    return GetPointer<const flatbuffers::String *>(VT_DATA_DIRECTORY_PATH);
  }
  // Structural validation of an untrusted buffer.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_STORAGE_FILE_PATH) &&
           verifier.VerifyString(storage_file_path()) &&
           VerifyOffset(verifier, VT_DATA_DIRECTORY_PATH) &&
           verifier.VerifyString(data_directory_path()) &&
           verifier.EndTable();
  }
  // Object-API bridge: buffer -> BenchmarkStoragePathsT and back.
  BenchmarkStoragePathsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkStoragePathsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkStoragePaths> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
3091 
// Low-level field-by-field builder for BenchmarkStoragePaths.
struct BenchmarkStoragePathsBuilder {
  typedef BenchmarkStoragePaths Table;
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;
  void add_storage_file_path(flatbuffers::Offset<flatbuffers::String> storage_file_path) {
    fbb_.AddOffset(BenchmarkStoragePaths::VT_STORAGE_FILE_PATH, storage_file_path);
  }
  void add_data_directory_path(flatbuffers::Offset<flatbuffers::String> data_directory_path) {
    fbb_.AddOffset(BenchmarkStoragePaths::VT_DATA_DIRECTORY_PATH, data_directory_path);
  }
  explicit BenchmarkStoragePathsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  flatbuffers::Offset<BenchmarkStoragePaths> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkStoragePaths>(end);
    return o;
  }
};
3112 
// One-call convenience wrapper: builds a complete BenchmarkStoragePaths table.
// Fields are added in reverse slot order, per flatbuffers generator convention.
inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> storage_file_path = 0,
    flatbuffers::Offset<flatbuffers::String> data_directory_path = 0) {
  BenchmarkStoragePathsBuilder builder_(_fbb);
  builder_.add_data_directory_path(data_directory_path);
  builder_.add_storage_file_path(storage_file_path);
  return builder_.Finish();
}
3122 
// Same as CreateBenchmarkStoragePaths but accepts raw C strings; a null
// argument leaves the corresponding field unset (offset 0).
inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePathsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *storage_file_path = nullptr,
    const char *data_directory_path = nullptr) {
  auto storage_file_path__ = storage_file_path ? _fbb.CreateString(storage_file_path) : 0;
  auto data_directory_path__ = data_directory_path ? _fbb.CreateString(data_directory_path) : 0;
  return tflite::CreateBenchmarkStoragePaths(
      _fbb,
      storage_file_path__,
      data_directory_path__);
}
3134 
3135 flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
3136 
// Object (native) API mirror of the MinibenchmarkSettings table. Owns deep
// copies of its nested tables through unique_ptr, so the custom copy ctor and
// copy-and-swap assignment below are declared alongside a defaulted move.
struct MinibenchmarkSettingsT : public flatbuffers::NativeTable {
  typedef MinibenchmarkSettings TableType;
  std::vector<std::unique_ptr<tflite::TFLiteSettingsT>> settings_to_test{};
  std::unique_ptr<tflite::ModelFileT> model_file{};
  std::unique_ptr<tflite::BenchmarkStoragePathsT> storage_paths{};
  MinibenchmarkSettingsT() = default;
  MinibenchmarkSettingsT(const MinibenchmarkSettingsT &o);
  MinibenchmarkSettingsT(MinibenchmarkSettingsT&&) FLATBUFFERS_NOEXCEPT = default;
  MinibenchmarkSettingsT &operator=(MinibenchmarkSettingsT o) FLATBUFFERS_NOEXCEPT;
};
3147 
// Read-only flatbuffer accessor for MinibenchmarkSettings: a view over the
// serialized buffer; field getters resolve through the vtable lazily.
struct MinibenchmarkSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef MinibenchmarkSettingsT NativeTableType;
  typedef MinibenchmarkSettingsBuilder Builder;
  // Vtable slot offsets for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_SETTINGS_TO_TEST = 4,
    VT_MODEL_FILE = 6,
    VT_STORAGE_PATHS = 8
  };
  const flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>> *settings_to_test() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>> *>(VT_SETTINGS_TO_TEST);
  }
  const tflite::ModelFile *model_file() const {
    return GetPointer<const tflite::ModelFile *>(VT_MODEL_FILE);
  }
  const tflite::BenchmarkStoragePaths *storage_paths() const {
    return GetPointer<const tflite::BenchmarkStoragePaths *>(VT_STORAGE_PATHS);
  }
  // Bounds-checks every field (the vector of tables and both nested tables)
  // against the buffer; returns false on any out-of-range access.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_SETTINGS_TO_TEST) &&
           verifier.VerifyVector(settings_to_test()) &&
           verifier.VerifyVectorOfTables(settings_to_test()) &&
           VerifyOffset(verifier, VT_MODEL_FILE) &&
           verifier.VerifyTable(model_file()) &&
           VerifyOffset(verifier, VT_STORAGE_PATHS) &&
           verifier.VerifyTable(storage_paths()) &&
           verifier.EndTable();
  }
  MinibenchmarkSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(MinibenchmarkSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<MinibenchmarkSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
3180 
// Incremental builder for a MinibenchmarkSettings table: call add_* for each
// field to set, then Finish() exactly once.
struct MinibenchmarkSettingsBuilder {
  typedef MinibenchmarkSettings Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // builder that owns the buffer being written
  flatbuffers::uoffset_t start_;         // table start offset, recorded by StartTable()
  void add_settings_to_test(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>>> settings_to_test) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_SETTINGS_TO_TEST, settings_to_test);
  }
  void add_model_file(flatbuffers::Offset<tflite::ModelFile> model_file) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_MODEL_FILE, model_file);
  }
  void add_storage_paths(flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_STORAGE_PATHS, storage_paths);
  }
  explicit MinibenchmarkSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Closes the table and returns its offset inside the buffer.
  flatbuffers::Offset<MinibenchmarkSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<MinibenchmarkSettings>(end);
    return o;
  }
};
3204 
// One-call convenience wrapper: builds a complete MinibenchmarkSettings table.
// Fields are added in reverse slot order, per flatbuffers generator convention.
inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>>> settings_to_test = 0,
    flatbuffers::Offset<tflite::ModelFile> model_file = 0,
    flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths = 0) {
  MinibenchmarkSettingsBuilder builder_(_fbb);
  builder_.add_storage_paths(storage_paths);
  builder_.add_model_file(model_file);
  builder_.add_settings_to_test(settings_to_test);
  return builder_.Finish();
}
3216 
// Same as CreateMinibenchmarkSettings but serializes the settings vector from
// a std::vector first; a null vector leaves the field unset (offset 0).
inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<flatbuffers::Offset<tflite::TFLiteSettings>> *settings_to_test = nullptr,
    flatbuffers::Offset<tflite::ModelFile> model_file = 0,
    flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths = 0) {
  auto settings_to_test__ = settings_to_test ? _fbb.CreateVector<flatbuffers::Offset<tflite::TFLiteSettings>>(*settings_to_test) : 0;
  return tflite::CreateMinibenchmarkSettings(
      _fbb,
      settings_to_test__,
      model_file,
      storage_paths);
}
3229 
3230 flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
3231 
3232 
// Deep field-wise equality for the native ComputeSettings object type.
// unique_ptr members compare equal when both are null (or the same pointer),
// or when both are non-null and the pointed-to values compare equal.
inline bool operator==(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs) {
  return
      (lhs.preference == rhs.preference) &&
      ((lhs.tflite_settings == rhs.tflite_settings) || (lhs.tflite_settings && rhs.tflite_settings && *lhs.tflite_settings == *rhs.tflite_settings)) &&
      (lhs.model_namespace_for_statistics == rhs.model_namespace_for_statistics) &&
      (lhs.model_identifier_for_statistics == rhs.model_identifier_for_statistics) &&
      ((lhs.settings_to_test_locally == rhs.settings_to_test_locally) || (lhs.settings_to_test_locally && rhs.settings_to_test_locally && *lhs.settings_to_test_locally == *rhs.settings_to_test_locally));
}
3241 
3242 inline bool operator!=(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs) {
3243     return !(lhs == rhs);
3244 }
3245 
3246 
// Deep-copy constructor: clones the nested tables held by unique_ptr (null
// stays null); scalar and string members are copied directly.
inline ComputeSettingsT::ComputeSettingsT(const ComputeSettingsT &o)
      : preference(o.preference),
        tflite_settings((o.tflite_settings) ? new tflite::TFLiteSettingsT(*o.tflite_settings) : nullptr),
        model_namespace_for_statistics(o.model_namespace_for_statistics),
        model_identifier_for_statistics(o.model_identifier_for_statistics),
        settings_to_test_locally((o.settings_to_test_locally) ? new tflite::MinibenchmarkSettingsT(*o.settings_to_test_locally) : nullptr) {
}
3254 
// Unified copy/move assignment via copy-and-swap: the by-value parameter is
// the copy (or moved-from source); swapping leaves *this with its contents.
inline ComputeSettingsT &ComputeSettingsT::operator=(ComputeSettingsT o) FLATBUFFERS_NOEXCEPT {
  std::swap(preference, o.preference);
  std::swap(tflite_settings, o.tflite_settings);
  std::swap(model_namespace_for_statistics, o.model_namespace_for_statistics);
  std::swap(model_identifier_for_statistics, o.model_identifier_for_statistics);
  std::swap(settings_to_test_locally, o.settings_to_test_locally);
  return *this;
}
3263 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline ComputeSettingsT *ComputeSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ComputeSettingsT>(new ComputeSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3269 
// Field-by-field copy from this flatbuffer table into *_o. Absent string
// fields are skipped (leaving _o's defaults); nested tables are unpacked in
// place when _o already holds one, otherwise freshly allocated.
inline void ComputeSettings::UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = preference(); _o->preference = _e; }
  { auto _e = tflite_settings(); if (_e) { if(_o->tflite_settings) { _e->UnPackTo(_o->tflite_settings.get(), _resolver); } else { _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = model_namespace_for_statistics(); if (_e) _o->model_namespace_for_statistics = _e->str(); }
  { auto _e = model_identifier_for_statistics(); if (_e) _o->model_identifier_for_statistics = _e->str(); }
  { auto _e = settings_to_test_locally(); if (_e) { if(_o->settings_to_test_locally) { _e->UnPackTo(_o->settings_to_test_locally.get(), _resolver); } else { _o->settings_to_test_locally = std::unique_ptr<tflite::MinibenchmarkSettingsT>(_e->UnPack(_resolver)); } } }
}
3279 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateComputeSettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<ComputeSettings> ComputeSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateComputeSettings(_fbb, _o, _rehasher);
}
3283 
// Re-serializes a native ComputeSettingsT into a flatbuffer table. Scalars are
// copied directly; strings are written only when non-empty and nested tables
// only when non-null (otherwise the field stays unset, offset 0).
inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ComputeSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _preference = _o->preference;
  auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
  auto _model_namespace_for_statistics = _o->model_namespace_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_namespace_for_statistics);
  auto _model_identifier_for_statistics = _o->model_identifier_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_identifier_for_statistics);
  auto _settings_to_test_locally = _o->settings_to_test_locally ? CreateMinibenchmarkSettings(_fbb, _o->settings_to_test_locally.get(), _rehasher) : 0;
  return tflite::CreateComputeSettings(
      _fbb,
      _preference,
      _tflite_settings,
      _model_namespace_for_statistics,
      _model_identifier_for_statistics,
      _settings_to_test_locally);
}
3301 
3302 
// Deep field-wise equality for the native NNAPISettings object type. The
// fallback_settings unique_ptr compares equal when both sides are null (or the
// same pointer), or both non-null with equal pointed-to values.
inline bool operator==(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs) {
  return
      (lhs.accelerator_name == rhs.accelerator_name) &&
      (lhs.cache_directory == rhs.cache_directory) &&
      (lhs.model_token == rhs.model_token) &&
      (lhs.execution_preference == rhs.execution_preference) &&
      (lhs.no_of_nnapi_instances_to_cache == rhs.no_of_nnapi_instances_to_cache) &&
      ((lhs.fallback_settings == rhs.fallback_settings) || (lhs.fallback_settings && rhs.fallback_settings && *lhs.fallback_settings == *rhs.fallback_settings)) &&
      (lhs.allow_nnapi_cpu_on_android_10_plus == rhs.allow_nnapi_cpu_on_android_10_plus) &&
      (lhs.execution_priority == rhs.execution_priority) &&
      (lhs.allow_dynamic_dimensions == rhs.allow_dynamic_dimensions) &&
      (lhs.allow_fp16_precision_for_fp32 == rhs.allow_fp16_precision_for_fp32) &&
      (lhs.use_burst_computation == rhs.use_burst_computation) &&
      (lhs.support_library_handle == rhs.support_library_handle);
}
3318 
// Inequality defined in terms of the field-wise operator==.
inline bool operator!=(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs) {
    return !(lhs == rhs);
}
3322 
3323 
// Deep-copy constructor: clones the fallback_settings sub-table held by
// unique_ptr (null stays null); all other members are copied directly.
inline NNAPISettingsT::NNAPISettingsT(const NNAPISettingsT &o)
      : accelerator_name(o.accelerator_name),
        cache_directory(o.cache_directory),
        model_token(o.model_token),
        execution_preference(o.execution_preference),
        no_of_nnapi_instances_to_cache(o.no_of_nnapi_instances_to_cache),
        fallback_settings((o.fallback_settings) ? new tflite::FallbackSettingsT(*o.fallback_settings) : nullptr),
        allow_nnapi_cpu_on_android_10_plus(o.allow_nnapi_cpu_on_android_10_plus),
        execution_priority(o.execution_priority),
        allow_dynamic_dimensions(o.allow_dynamic_dimensions),
        allow_fp16_precision_for_fp32(o.allow_fp16_precision_for_fp32),
        use_burst_computation(o.use_burst_computation),
        support_library_handle(o.support_library_handle) {
}
3338 
// Unified copy/move assignment via copy-and-swap with the by-value parameter.
inline NNAPISettingsT &NNAPISettingsT::operator=(NNAPISettingsT o) FLATBUFFERS_NOEXCEPT {
  std::swap(accelerator_name, o.accelerator_name);
  std::swap(cache_directory, o.cache_directory);
  std::swap(model_token, o.model_token);
  std::swap(execution_preference, o.execution_preference);
  std::swap(no_of_nnapi_instances_to_cache, o.no_of_nnapi_instances_to_cache);
  std::swap(fallback_settings, o.fallback_settings);
  std::swap(allow_nnapi_cpu_on_android_10_plus, o.allow_nnapi_cpu_on_android_10_plus);
  std::swap(execution_priority, o.execution_priority);
  std::swap(allow_dynamic_dimensions, o.allow_dynamic_dimensions);
  std::swap(allow_fp16_precision_for_fp32, o.allow_fp16_precision_for_fp32);
  std::swap(use_burst_computation, o.use_burst_computation);
  std::swap(support_library_handle, o.support_library_handle);
  return *this;
}
3354 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline NNAPISettingsT *NNAPISettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<NNAPISettingsT>(new NNAPISettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3360 
// Field-by-field copy from this flatbuffer table into *_o. Absent strings are
// skipped; the fallback_settings sub-table is unpacked in place when _o
// already holds one, otherwise freshly allocated.
inline void NNAPISettings::UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = accelerator_name(); if (_e) _o->accelerator_name = _e->str(); }
  { auto _e = cache_directory(); if (_e) _o->cache_directory = _e->str(); }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
  { auto _e = execution_preference(); _o->execution_preference = _e; }
  { auto _e = no_of_nnapi_instances_to_cache(); _o->no_of_nnapi_instances_to_cache = _e; }
  { auto _e = fallback_settings(); if (_e) { if(_o->fallback_settings) { _e->UnPackTo(_o->fallback_settings.get(), _resolver); } else { _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = allow_nnapi_cpu_on_android_10_plus(); _o->allow_nnapi_cpu_on_android_10_plus = _e; }
  { auto _e = execution_priority(); _o->execution_priority = _e; }
  { auto _e = allow_dynamic_dimensions(); _o->allow_dynamic_dimensions = _e; }
  { auto _e = allow_fp16_precision_for_fp32(); _o->allow_fp16_precision_for_fp32 = _e; }
  { auto _e = use_burst_computation(); _o->use_burst_computation = _e; }
  { auto _e = support_library_handle(); _o->support_library_handle = _e; }
}
3377 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateNNAPISettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<NNAPISettings> NNAPISettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateNNAPISettings(_fbb, _o, _rehasher);
}
3381 
// Re-serializes a native NNAPISettingsT into a flatbuffer table. Scalars are
// copied; strings are written only when non-empty and the fallback_settings
// sub-table only when non-null (otherwise the field stays unset, offset 0).
inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NNAPISettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _accelerator_name = _o->accelerator_name.empty() ? 0 : _fbb.CreateString(_o->accelerator_name);
  auto _cache_directory = _o->cache_directory.empty() ? 0 : _fbb.CreateString(_o->cache_directory);
  auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
  auto _execution_preference = _o->execution_preference;
  auto _no_of_nnapi_instances_to_cache = _o->no_of_nnapi_instances_to_cache;
  auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
  auto _allow_nnapi_cpu_on_android_10_plus = _o->allow_nnapi_cpu_on_android_10_plus;
  auto _execution_priority = _o->execution_priority;
  auto _allow_dynamic_dimensions = _o->allow_dynamic_dimensions;
  auto _allow_fp16_precision_for_fp32 = _o->allow_fp16_precision_for_fp32;
  auto _use_burst_computation = _o->use_burst_computation;
  auto _support_library_handle = _o->support_library_handle;
  return tflite::CreateNNAPISettings(
      _fbb,
      _accelerator_name,
      _cache_directory,
      _model_token,
      _execution_preference,
      _no_of_nnapi_instances_to_cache,
      _fallback_settings,
      _allow_nnapi_cpu_on_android_10_plus,
      _execution_priority,
      _allow_dynamic_dimensions,
      _allow_fp16_precision_for_fp32,
      _use_burst_computation,
      _support_library_handle);
}
3413 
3414 
// Field-wise equality for the native GPUSettings object type (all members are
// scalars, enums, or strings — no owned sub-tables to deep-compare).
inline bool operator==(const GPUSettingsT &lhs, const GPUSettingsT &rhs) {
  return
      (lhs.is_precision_loss_allowed == rhs.is_precision_loss_allowed) &&
      (lhs.enable_quantized_inference == rhs.enable_quantized_inference) &&
      (lhs.force_backend == rhs.force_backend) &&
      (lhs.inference_priority1 == rhs.inference_priority1) &&
      (lhs.inference_priority2 == rhs.inference_priority2) &&
      (lhs.inference_priority3 == rhs.inference_priority3) &&
      (lhs.inference_preference == rhs.inference_preference) &&
      (lhs.cache_directory == rhs.cache_directory) &&
      (lhs.model_token == rhs.model_token);
}
3427 
3428 inline bool operator!=(const GPUSettingsT &lhs, const GPUSettingsT &rhs) {
3429     return !(lhs == rhs);
3430 }
3431 
3432 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline GPUSettingsT *GPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<GPUSettingsT>(new GPUSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3438 
// Field-by-field copy from this flatbuffer table into *_o; absent string
// fields are skipped, leaving _o's existing values.
inline void GPUSettings::UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = is_precision_loss_allowed(); _o->is_precision_loss_allowed = _e; }
  { auto _e = enable_quantized_inference(); _o->enable_quantized_inference = _e; }
  { auto _e = force_backend(); _o->force_backend = _e; }
  { auto _e = inference_priority1(); _o->inference_priority1 = _e; }
  { auto _e = inference_priority2(); _o->inference_priority2 = _e; }
  { auto _e = inference_priority3(); _o->inference_priority3 = _e; }
  { auto _e = inference_preference(); _o->inference_preference = _e; }
  { auto _e = cache_directory(); if (_e) _o->cache_directory = _e->str(); }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
}
3452 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateGPUSettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<GPUSettings> GPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateGPUSettings(_fbb, _o, _rehasher);
}
3456 
// Re-serializes a native GPUSettingsT into a flatbuffer table. Scalars are
// copied; strings are written only when non-empty (otherwise unset, offset 0).
inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _is_precision_loss_allowed = _o->is_precision_loss_allowed;
  auto _enable_quantized_inference = _o->enable_quantized_inference;
  auto _force_backend = _o->force_backend;
  auto _inference_priority1 = _o->inference_priority1;
  auto _inference_priority2 = _o->inference_priority2;
  auto _inference_priority3 = _o->inference_priority3;
  auto _inference_preference = _o->inference_preference;
  auto _cache_directory = _o->cache_directory.empty() ? 0 : _fbb.CreateString(_o->cache_directory);
  auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
  return tflite::CreateGPUSettings(
      _fbb,
      _is_precision_loss_allowed,
      _enable_quantized_inference,
      _force_backend,
      _inference_priority1,
      _inference_priority2,
      _inference_priority3,
      _inference_preference,
      _cache_directory,
      _model_token);
}
3482 
3483 
// Field-wise equality for the native HexagonSettings object type.
inline bool operator==(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs) {
  return
      (lhs.debug_level == rhs.debug_level) &&
      (lhs.powersave_level == rhs.powersave_level) &&
      (lhs.print_graph_profile == rhs.print_graph_profile) &&
      (lhs.print_graph_debug == rhs.print_graph_debug);
}
3491 
// Inequality defined in terms of the field-wise operator==.
inline bool operator!=(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs) {
    return !(lhs == rhs);
}
3495 
3496 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline HexagonSettingsT *HexagonSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<HexagonSettingsT>(new HexagonSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3502 
// Field-by-field copy of the scalar fields from this table into *_o.
inline void HexagonSettings::UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = debug_level(); _o->debug_level = _e; }
  { auto _e = powersave_level(); _o->powersave_level = _e; }
  { auto _e = print_graph_profile(); _o->print_graph_profile = _e; }
  { auto _e = print_graph_debug(); _o->print_graph_debug = _e; }
}
3511 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateHexagonSettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<HexagonSettings> HexagonSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateHexagonSettings(_fbb, _o, _rehasher);
}
3515 
// Re-serializes a native HexagonSettingsT (all scalar fields) into a table.
inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HexagonSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _debug_level = _o->debug_level;
  auto _powersave_level = _o->powersave_level;
  auto _print_graph_profile = _o->print_graph_profile;
  auto _print_graph_debug = _o->print_graph_debug;
  return tflite::CreateHexagonSettings(
      _fbb,
      _debug_level,
      _powersave_level,
      _print_graph_profile,
      _print_graph_debug);
}
3531 
3532 
// Field-wise equality for the native XNNPackSettings object type.
inline bool operator==(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs) {
  return
      (lhs.num_threads == rhs.num_threads) &&
      (lhs.flags == rhs.flags);
}
3538 
// Inequality defined in terms of the field-wise operator==.
inline bool operator!=(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs) {
    return !(lhs == rhs);
}
3542 
3543 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline XNNPackSettingsT *XNNPackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<XNNPackSettingsT>(new XNNPackSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3549 
// Field-by-field copy of the scalar fields from this table into *_o.
inline void XNNPackSettings::UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = num_threads(); _o->num_threads = _e; }
  { auto _e = flags(); _o->flags = _e; }
}
3556 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateXNNPackSettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<XNNPackSettings> XNNPackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateXNNPackSettings(_fbb, _o, _rehasher);
}
3560 
// Re-serializes a native XNNPackSettingsT (all scalar fields) into a table.
inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const XNNPackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _num_threads = _o->num_threads;
  auto _flags = _o->flags;
  return tflite::CreateXNNPackSettings(
      _fbb,
      _num_threads,
      _flags);
}
3572 
3573 
// Field-wise equality for the native CoreMLSettings object type.
inline bool operator==(const CoreMLSettingsT &lhs, const CoreMLSettingsT &rhs) {
  return
      (lhs.enabled_devices == rhs.enabled_devices) &&
      (lhs.coreml_version == rhs.coreml_version) &&
      (lhs.max_delegated_partitions == rhs.max_delegated_partitions) &&
      (lhs.min_nodes_per_partition == rhs.min_nodes_per_partition);
}
3581 
// Inequality defined in terms of the field-wise operator==.
inline bool operator!=(const CoreMLSettingsT &lhs, const CoreMLSettingsT &rhs) {
    return !(lhs == rhs);
}
3585 
3586 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline CoreMLSettingsT *CoreMLSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<CoreMLSettingsT>(new CoreMLSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3592 
// Field-by-field copy of the scalar fields from this table into *_o.
inline void CoreMLSettings::UnPackTo(CoreMLSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = enabled_devices(); _o->enabled_devices = _e; }
  { auto _e = coreml_version(); _o->coreml_version = _e; }
  { auto _e = max_delegated_partitions(); _o->max_delegated_partitions = _e; }
  { auto _e = min_nodes_per_partition(); _o->min_nodes_per_partition = _e; }
}
3601 
// Serializes a native object back into a flatbuffer table; thin wrapper over
// the free CreateCoreMLSettings(_fbb, _o, _rehasher) overload.
inline flatbuffers::Offset<CoreMLSettings> CoreMLSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoreMLSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCoreMLSettings(_fbb, _o, _rehasher);
}
3605 
// Re-serializes a native CoreMLSettingsT (all scalar fields) into a table.
inline flatbuffers::Offset<CoreMLSettings> CreateCoreMLSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoreMLSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va packs the context for vector-element lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CoreMLSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _enabled_devices = _o->enabled_devices;
  auto _coreml_version = _o->coreml_version;
  auto _max_delegated_partitions = _o->max_delegated_partitions;
  auto _min_nodes_per_partition = _o->min_nodes_per_partition;
  return tflite::CreateCoreMLSettings(
      _fbb,
      _enabled_devices,
      _coreml_version,
      _max_delegated_partitions,
      _min_nodes_per_partition);
}
3621 
3622 
// Field-wise equality for the native EdgeTpuDeviceSpec object type; the
// device_paths vector compares element-wise via std::vector::operator==.
inline bool operator==(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs) {
  return
      (lhs.platform_type == rhs.platform_type) &&
      (lhs.num_chips == rhs.num_chips) &&
      (lhs.device_paths == rhs.device_paths) &&
      (lhs.chip_family == rhs.chip_family);
}
3630 
// Inequality defined in terms of the field-wise operator==.
inline bool operator!=(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs) {
    return !(lhs == rhs);
}
3634 
3635 
// Unpacks this flatbuffer table into a newly heap-allocated native object.
// Caller takes ownership of the returned pointer.
inline EdgeTpuDeviceSpecT *EdgeTpuDeviceSpec::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<EdgeTpuDeviceSpecT>(new EdgeTpuDeviceSpecT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}
3641 
// Field-by-field copy from this table into *_o; the device_paths string
// vector is resized and copied element-by-element when present.
inline void EdgeTpuDeviceSpec::UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = platform_type(); _o->platform_type = _e; }
  { auto _e = num_chips(); _o->num_chips = _e; }
  { auto _e = device_paths(); if (_e) { _o->device_paths.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->device_paths[_i] = _e->Get(_i)->str(); } } }
  { auto _e = chip_family(); _o->chip_family = _e; }
}
3650 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuDeviceSpecT * _o,const flatbuffers::rehasher_function_t * _rehasher)3651 inline flatbuffers::Offset<EdgeTpuDeviceSpec> EdgeTpuDeviceSpec::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3652   return CreateEdgeTpuDeviceSpec(_fbb, _o, _rehasher);
3653 }
3654 
CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder & _fbb,const EdgeTpuDeviceSpecT * _o,const flatbuffers::rehasher_function_t * _rehasher)3655 inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3656   (void)_rehasher;
3657   (void)_o;
3658   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuDeviceSpecT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3659   auto _platform_type = _o->platform_type;
3660   auto _num_chips = _o->num_chips;
3661   auto _device_paths = _o->device_paths.size() ? _fbb.CreateVectorOfStrings(_o->device_paths) : 0;
3662   auto _chip_family = _o->chip_family;
3663   return tflite::CreateEdgeTpuDeviceSpec(
3664       _fbb,
3665       _platform_type,
3666       _num_chips,
3667       _device_paths,
3668       _chip_family);
3669 }
3670 
3671 
// Deep field-wise equality for the unpacked EdgeTpuInactivePowerConfig type.
inline bool operator==(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs) {
  return
      (lhs.inactive_power_state == rhs.inactive_power_state) &&
      (lhs.inactive_timeout_us == rhs.inactive_timeout_us);
}

inline bool operator!=(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs) {
    return !(lhs == rhs);
}


// Allocates a new unpacked object filled from this table; caller owns it.
inline EdgeTpuInactivePowerConfigT *EdgeTpuInactivePowerConfig::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<EdgeTpuInactivePowerConfigT>(new EdgeTpuInactivePowerConfigT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies both scalar fields of this table into the unpacked object `_o`.
inline void EdgeTpuInactivePowerConfig::UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = inactive_power_state(); _o->inactive_power_state = _e; }
  { auto _e = inactive_timeout_us(); _o->inactive_timeout_us = _e; }
}

// Convenience wrapper: serialize an unpacked object via the Create function.
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> EdgeTpuInactivePowerConfig::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEdgeTpuInactivePowerConfig(_fbb, _o, _rehasher);
}

// Builds an EdgeTpuInactivePowerConfig table from `_o` (scalar fields only).
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuInactivePowerConfigT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _inactive_power_state = _o->inactive_power_state;
  auto _inactive_timeout_us = _o->inactive_timeout_us;
  return tflite::CreateEdgeTpuInactivePowerConfig(
      _fbb,
      _inactive_power_state,
      _inactive_timeout_us);
}
3711 
3712 
// Deep equality for the unpacked EdgeTpuSettings type. Pointer-held sub-table
// (edgetpu_device_spec) compares equal when both are null, or both non-null
// and the pointees compare equal.
inline bool operator==(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs) {
  return
      (lhs.inference_power_state == rhs.inference_power_state) &&
      (lhs.inactive_power_configs == rhs.inactive_power_configs) &&
      (lhs.inference_priority == rhs.inference_priority) &&
      ((lhs.edgetpu_device_spec == rhs.edgetpu_device_spec) || (lhs.edgetpu_device_spec && rhs.edgetpu_device_spec && *lhs.edgetpu_device_spec == *rhs.edgetpu_device_spec)) &&
      (lhs.model_token == rhs.model_token) &&
      (lhs.float_truncation_type == rhs.float_truncation_type) &&
      (lhs.qos_class == rhs.qos_class);
}

inline bool operator!=(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs) {
    return !(lhs == rhs);
}


// Deep copy constructor: clones the owned sub-table and each element of the
// inactive_power_configs vector (null elements stay null).
inline EdgeTpuSettingsT::EdgeTpuSettingsT(const EdgeTpuSettingsT &o)
      : inference_power_state(o.inference_power_state),
        inference_priority(o.inference_priority),
        edgetpu_device_spec((o.edgetpu_device_spec) ? new tflite::EdgeTpuDeviceSpecT(*o.edgetpu_device_spec) : nullptr),
        model_token(o.model_token),
        float_truncation_type(o.float_truncation_type),
        qos_class(o.qos_class) {
  inactive_power_configs.reserve(o.inactive_power_configs.size());
  for (const auto &v : o.inactive_power_configs) { inactive_power_configs.emplace_back((v) ? new tflite::EdgeTpuInactivePowerConfigT(*v) : nullptr); }
}

// Copy-and-swap assignment: `o` is taken by value (already a copy or a move),
// so swapping each member is exception-safe and handles self-assignment.
inline EdgeTpuSettingsT &EdgeTpuSettingsT::operator=(EdgeTpuSettingsT o) FLATBUFFERS_NOEXCEPT {
  std::swap(inference_power_state, o.inference_power_state);
  std::swap(inactive_power_configs, o.inactive_power_configs);
  std::swap(inference_priority, o.inference_priority);
  std::swap(edgetpu_device_spec, o.edgetpu_device_spec);
  std::swap(model_token, o.model_token);
  std::swap(float_truncation_type, o.float_truncation_type);
  std::swap(qos_class, o.qos_class);
  return *this;
}

// Allocates a new unpacked object filled from this table; caller owns it.
inline EdgeTpuSettingsT *EdgeTpuSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<EdgeTpuSettingsT>(new EdgeTpuSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies every field of this table into `_o`. Sub-tables are unpacked in
// place when `_o` already holds an object, otherwise freshly allocated.
inline void EdgeTpuSettings::UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = inference_power_state(); _o->inference_power_state = _e; }
  { auto _e = inactive_power_configs(); if (_e) { _o->inactive_power_configs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->inactive_power_configs[_i]) { _e->Get(_i)->UnPackTo(_o->inactive_power_configs[_i].get(), _resolver); } else { _o->inactive_power_configs[_i] = std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
  { auto _e = inference_priority(); _o->inference_priority = _e; }
  { auto _e = edgetpu_device_spec(); if (_e) { if(_o->edgetpu_device_spec) { _e->UnPackTo(_o->edgetpu_device_spec.get(), _resolver); } else { _o->edgetpu_device_spec = std::unique_ptr<tflite::EdgeTpuDeviceSpecT>(_e->UnPack(_resolver)); } } }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
  { auto _e = float_truncation_type(); _o->float_truncation_type = _e; }
  { auto _e = qos_class(); _o->qos_class = _e; }
}

// Convenience wrapper: serialize an unpacked object via CreateEdgeTpuSettings.
inline flatbuffers::Offset<EdgeTpuSettings> EdgeTpuSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEdgeTpuSettings(_fbb, _o, _rehasher);
}

// Builds an EdgeTpuSettings table from `_o`. Child offsets (vector of
// sub-tables, device-spec sub-table, model_token string) are created before
// the parent table is assembled; absent/empty ones encode as offset 0.
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va hands builder/object/rehasher to the vector-element lambda below.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _inference_power_state = _o->inference_power_state;
  auto _inactive_power_configs = _o->inactive_power_configs.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> (_o->inactive_power_configs.size(), [](size_t i, _VectorArgs *__va) { return CreateEdgeTpuInactivePowerConfig(*__va->__fbb, __va->__o->inactive_power_configs[i].get(), __va->__rehasher); }, &_va ) : 0;
  auto _inference_priority = _o->inference_priority;
  auto _edgetpu_device_spec = _o->edgetpu_device_spec ? CreateEdgeTpuDeviceSpec(_fbb, _o->edgetpu_device_spec.get(), _rehasher) : 0;
  auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
  auto _float_truncation_type = _o->float_truncation_type;
  auto _qos_class = _o->qos_class;
  return tflite::CreateEdgeTpuSettings(
      _fbb,
      _inference_power_state,
      _inactive_power_configs,
      _inference_priority,
      _edgetpu_device_spec,
      _model_token,
      _float_truncation_type,
      _qos_class);
}
3794 
3795 
// Deep field-wise equality for the unpacked CoralSettings object type.
inline bool operator==(const CoralSettingsT &lhs, const CoralSettingsT &rhs) {
  return
      (lhs.device == rhs.device) &&
      (lhs.performance == rhs.performance) &&
      (lhs.usb_always_dfu == rhs.usb_always_dfu) &&
      (lhs.usb_max_bulk_in_queue_length == rhs.usb_max_bulk_in_queue_length);
}

inline bool operator!=(const CoralSettingsT &lhs, const CoralSettingsT &rhs) {
    return !(lhs == rhs);
}


// Allocates a new unpacked object filled from this table; caller owns it.
inline CoralSettingsT *CoralSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<CoralSettingsT>(new CoralSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies every field of this table into the unpacked object `_o`;
// a missing `device` string leaves `_o->device` unchanged.
inline void CoralSettings::UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = device(); if (_e) _o->device = _e->str(); }
  { auto _e = performance(); _o->performance = _e; }
  { auto _e = usb_always_dfu(); _o->usb_always_dfu = _e; }
  { auto _e = usb_max_bulk_in_queue_length(); _o->usb_max_bulk_in_queue_length = _e; }
}

// Convenience wrapper: serialize an unpacked object via CreateCoralSettings.
inline flatbuffers::Offset<CoralSettings> CoralSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCoralSettings(_fbb, _o, _rehasher);
}

// Builds a CoralSettings table from `_o`; an empty device string is encoded
// as offset 0 (field absent).
inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CoralSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _device = _o->device.empty() ? 0 : _fbb.CreateString(_o->device);
  auto _performance = _o->performance;
  auto _usb_always_dfu = _o->usb_always_dfu;
  auto _usb_max_bulk_in_queue_length = _o->usb_max_bulk_in_queue_length;
  return tflite::CreateCoralSettings(
      _fbb,
      _device,
      _performance,
      _usb_always_dfu,
      _usb_max_bulk_in_queue_length);
}
3843 
3844 
// Field-wise equality for the unpacked CPUSettings type (single field).
inline bool operator==(const CPUSettingsT &lhs, const CPUSettingsT &rhs) {
  return
      (lhs.num_threads == rhs.num_threads);
}

inline bool operator!=(const CPUSettingsT &lhs, const CPUSettingsT &rhs) {
    return !(lhs == rhs);
}


// Allocates a new unpacked object filled from this table; caller owns it.
inline CPUSettingsT *CPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<CPUSettingsT>(new CPUSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies the single scalar field of this table into `_o`.
inline void CPUSettings::UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = num_threads(); _o->num_threads = _e; }
}

// Convenience wrapper: serialize an unpacked object via CreateCPUSettings.
inline flatbuffers::Offset<CPUSettings> CPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCPUSettings(_fbb, _o, _rehasher);
}

// Builds a CPUSettings table from `_o` (scalar field only).
inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _num_threads = _o->num_threads;
  return tflite::CreateCPUSettings(
      _fbb,
      _num_threads);
}
3880 
3881 
// Deep equality for the unpacked TFLiteSettings type. Each pointer-held
// sub-settings field compares equal when both sides are null, or both are
// non-null and the pointees compare equal.
inline bool operator==(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs) {
  return
      (lhs.delegate == rhs.delegate) &&
      ((lhs.nnapi_settings == rhs.nnapi_settings) || (lhs.nnapi_settings && rhs.nnapi_settings && *lhs.nnapi_settings == *rhs.nnapi_settings)) &&
      ((lhs.gpu_settings == rhs.gpu_settings) || (lhs.gpu_settings && rhs.gpu_settings && *lhs.gpu_settings == *rhs.gpu_settings)) &&
      ((lhs.hexagon_settings == rhs.hexagon_settings) || (lhs.hexagon_settings && rhs.hexagon_settings && *lhs.hexagon_settings == *rhs.hexagon_settings)) &&
      ((lhs.xnnpack_settings == rhs.xnnpack_settings) || (lhs.xnnpack_settings && rhs.xnnpack_settings && *lhs.xnnpack_settings == *rhs.xnnpack_settings)) &&
      ((lhs.coreml_settings == rhs.coreml_settings) || (lhs.coreml_settings && rhs.coreml_settings && *lhs.coreml_settings == *rhs.coreml_settings)) &&
      ((lhs.cpu_settings == rhs.cpu_settings) || (lhs.cpu_settings && rhs.cpu_settings && *lhs.cpu_settings == *rhs.cpu_settings)) &&
      (lhs.max_delegated_partitions == rhs.max_delegated_partitions) &&
      ((lhs.edgetpu_settings == rhs.edgetpu_settings) || (lhs.edgetpu_settings && rhs.edgetpu_settings && *lhs.edgetpu_settings == *rhs.edgetpu_settings)) &&
      ((lhs.coral_settings == rhs.coral_settings) || (lhs.coral_settings && rhs.coral_settings && *lhs.coral_settings == *rhs.coral_settings)) &&
      ((lhs.fallback_settings == rhs.fallback_settings) || (lhs.fallback_settings && rhs.fallback_settings && *lhs.fallback_settings == *rhs.fallback_settings)) &&
      (lhs.disable_default_delegates == rhs.disable_default_delegates);
}

inline bool operator!=(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs) {
    return !(lhs == rhs);
}


// Deep copy constructor: each owned sub-settings object is cloned when
// present, null otherwise.
inline TFLiteSettingsT::TFLiteSettingsT(const TFLiteSettingsT &o)
      : delegate(o.delegate),
        nnapi_settings((o.nnapi_settings) ? new tflite::NNAPISettingsT(*o.nnapi_settings) : nullptr),
        gpu_settings((o.gpu_settings) ? new tflite::GPUSettingsT(*o.gpu_settings) : nullptr),
        hexagon_settings((o.hexagon_settings) ? new tflite::HexagonSettingsT(*o.hexagon_settings) : nullptr),
        xnnpack_settings((o.xnnpack_settings) ? new tflite::XNNPackSettingsT(*o.xnnpack_settings) : nullptr),
        coreml_settings((o.coreml_settings) ? new tflite::CoreMLSettingsT(*o.coreml_settings) : nullptr),
        cpu_settings((o.cpu_settings) ? new tflite::CPUSettingsT(*o.cpu_settings) : nullptr),
        max_delegated_partitions(o.max_delegated_partitions),
        edgetpu_settings((o.edgetpu_settings) ? new tflite::EdgeTpuSettingsT(*o.edgetpu_settings) : nullptr),
        coral_settings((o.coral_settings) ? new tflite::CoralSettingsT(*o.coral_settings) : nullptr),
        fallback_settings((o.fallback_settings) ? new tflite::FallbackSettingsT(*o.fallback_settings) : nullptr),
        disable_default_delegates(o.disable_default_delegates) {
}

// Copy-and-swap assignment: `o` arrives by value, so member-wise swap is
// exception-safe and self-assignment-safe.
inline TFLiteSettingsT &TFLiteSettingsT::operator=(TFLiteSettingsT o) FLATBUFFERS_NOEXCEPT {
  std::swap(delegate, o.delegate);
  std::swap(nnapi_settings, o.nnapi_settings);
  std::swap(gpu_settings, o.gpu_settings);
  std::swap(hexagon_settings, o.hexagon_settings);
  std::swap(xnnpack_settings, o.xnnpack_settings);
  std::swap(coreml_settings, o.coreml_settings);
  std::swap(cpu_settings, o.cpu_settings);
  std::swap(max_delegated_partitions, o.max_delegated_partitions);
  std::swap(edgetpu_settings, o.edgetpu_settings);
  std::swap(coral_settings, o.coral_settings);
  std::swap(fallback_settings, o.fallback_settings);
  std::swap(disable_default_delegates, o.disable_default_delegates);
  return *this;
}

// Allocates a new unpacked object filled from this table; caller owns it.
inline TFLiteSettingsT *TFLiteSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<TFLiteSettingsT>(new TFLiteSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies every field of this table into `_o`. Each present sub-table is
// unpacked into the existing object when `_o` already holds one, otherwise
// freshly allocated; absent sub-tables leave `_o`'s members unchanged.
inline void TFLiteSettings::UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = delegate(); _o->delegate = _e; }
  { auto _e = nnapi_settings(); if (_e) { if(_o->nnapi_settings) { _e->UnPackTo(_o->nnapi_settings.get(), _resolver); } else { _o->nnapi_settings = std::unique_ptr<tflite::NNAPISettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = gpu_settings(); if (_e) { if(_o->gpu_settings) { _e->UnPackTo(_o->gpu_settings.get(), _resolver); } else { _o->gpu_settings = std::unique_ptr<tflite::GPUSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = hexagon_settings(); if (_e) { if(_o->hexagon_settings) { _e->UnPackTo(_o->hexagon_settings.get(), _resolver); } else { _o->hexagon_settings = std::unique_ptr<tflite::HexagonSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = xnnpack_settings(); if (_e) { if(_o->xnnpack_settings) { _e->UnPackTo(_o->xnnpack_settings.get(), _resolver); } else { _o->xnnpack_settings = std::unique_ptr<tflite::XNNPackSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = coreml_settings(); if (_e) { if(_o->coreml_settings) { _e->UnPackTo(_o->coreml_settings.get(), _resolver); } else { _o->coreml_settings = std::unique_ptr<tflite::CoreMLSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = cpu_settings(); if (_e) { if(_o->cpu_settings) { _e->UnPackTo(_o->cpu_settings.get(), _resolver); } else { _o->cpu_settings = std::unique_ptr<tflite::CPUSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = max_delegated_partitions(); _o->max_delegated_partitions = _e; }
  { auto _e = edgetpu_settings(); if (_e) { if(_o->edgetpu_settings) { _e->UnPackTo(_o->edgetpu_settings.get(), _resolver); } else { _o->edgetpu_settings = std::unique_ptr<tflite::EdgeTpuSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = coral_settings(); if (_e) { if(_o->coral_settings) { _e->UnPackTo(_o->coral_settings.get(), _resolver); } else { _o->coral_settings = std::unique_ptr<tflite::CoralSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = fallback_settings(); if (_e) { if(_o->fallback_settings) { _e->UnPackTo(_o->fallback_settings.get(), _resolver); } else { _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = disable_default_delegates(); _o->disable_default_delegates = _e; }
}

// Convenience wrapper: serialize an unpacked object via CreateTFLiteSettings.
inline flatbuffers::Offset<TFLiteSettings> TFLiteSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateTFLiteSettings(_fbb, _o, _rehasher);
}

// Builds a TFLiteSettings table from `_o`. Each sub-settings offset is
// created before the parent table is assembled; null members encode as
// offset 0 (field absent).
inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TFLiteSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _delegate = _o->delegate;
  auto _nnapi_settings = _o->nnapi_settings ? CreateNNAPISettings(_fbb, _o->nnapi_settings.get(), _rehasher) : 0;
  auto _gpu_settings = _o->gpu_settings ? CreateGPUSettings(_fbb, _o->gpu_settings.get(), _rehasher) : 0;
  auto _hexagon_settings = _o->hexagon_settings ? CreateHexagonSettings(_fbb, _o->hexagon_settings.get(), _rehasher) : 0;
  auto _xnnpack_settings = _o->xnnpack_settings ? CreateXNNPackSettings(_fbb, _o->xnnpack_settings.get(), _rehasher) : 0;
  auto _coreml_settings = _o->coreml_settings ? CreateCoreMLSettings(_fbb, _o->coreml_settings.get(), _rehasher) : 0;
  auto _cpu_settings = _o->cpu_settings ? CreateCPUSettings(_fbb, _o->cpu_settings.get(), _rehasher) : 0;
  auto _max_delegated_partitions = _o->max_delegated_partitions;
  auto _edgetpu_settings = _o->edgetpu_settings ? CreateEdgeTpuSettings(_fbb, _o->edgetpu_settings.get(), _rehasher) : 0;
  auto _coral_settings = _o->coral_settings ? CreateCoralSettings(_fbb, _o->coral_settings.get(), _rehasher) : 0;
  auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
  auto _disable_default_delegates = _o->disable_default_delegates;
  return tflite::CreateTFLiteSettings(
      _fbb,
      _delegate,
      _nnapi_settings,
      _gpu_settings,
      _hexagon_settings,
      _xnnpack_settings,
      _coreml_settings,
      _cpu_settings,
      _max_delegated_partitions,
      _edgetpu_settings,
      _coral_settings,
      _fallback_settings,
      _disable_default_delegates);
}
3992 
3993 
// Field-wise equality for the unpacked FallbackSettings type.
inline bool operator==(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs) {
  return
      (lhs.allow_automatic_fallback_on_compilation_error == rhs.allow_automatic_fallback_on_compilation_error) &&
      (lhs.allow_automatic_fallback_on_execution_error == rhs.allow_automatic_fallback_on_execution_error);
}

inline bool operator!=(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs) {
    return !(lhs == rhs);
}


// Allocates a new unpacked object filled from this table; caller owns it.
inline FallbackSettingsT *FallbackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<FallbackSettingsT>(new FallbackSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies both scalar fields of this table into the unpacked object `_o`.
inline void FallbackSettings::UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = allow_automatic_fallback_on_compilation_error(); _o->allow_automatic_fallback_on_compilation_error = _e; }
  { auto _e = allow_automatic_fallback_on_execution_error(); _o->allow_automatic_fallback_on_execution_error = _e; }
}

// Convenience wrapper: serialize an unpacked object via CreateFallbackSettings.
inline flatbuffers::Offset<FallbackSettings> FallbackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateFallbackSettings(_fbb, _o, _rehasher);
}

// Builds a FallbackSettings table from `_o` (scalar fields only).
inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FallbackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _allow_automatic_fallback_on_compilation_error = _o->allow_automatic_fallback_on_compilation_error;
  auto _allow_automatic_fallback_on_execution_error = _o->allow_automatic_fallback_on_execution_error;
  return tflite::CreateFallbackSettings(
      _fbb,
      _allow_automatic_fallback_on_compilation_error,
      _allow_automatic_fallback_on_execution_error);
}
4033 
4034 
// Field-wise equality for the unpacked BenchmarkMetric type.
inline bool operator==(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs) {
  return
      (lhs.name == rhs.name) &&
      (lhs.values == rhs.values);
}

inline bool operator!=(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs) {
    return !(lhs == rhs);
}


// Allocates a new unpacked object filled from this table; caller owns it.
inline BenchmarkMetricT *BenchmarkMetric::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkMetricT>(new BenchmarkMetricT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies the name string and the scalar vector `values` into `_o`; absent
// fields leave the corresponding members unchanged.
inline void BenchmarkMetric::UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = name(); if (_e) _o->name = _e->str(); }
  { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } }
}

// Convenience wrapper: serialize an unpacked object via CreateBenchmarkMetric.
inline flatbuffers::Offset<BenchmarkMetric> BenchmarkMetric::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkMetric(_fbb, _o, _rehasher);
}

// Builds a BenchmarkMetric table from `_o`; empty string/vector encode as
// offset 0 (field absent).
inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkMetricT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
  auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
  return tflite::CreateBenchmarkMetric(
      _fbb,
      _name,
      _values);
}
4074 
4075 
// Deep equality for the unpacked BenchmarkResult type. `metrics` is a vector
// of unique_ptr whose comparison semantics come from the vector/pointer
// element comparison.
inline bool operator==(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs) {
  return
      (lhs.initialization_time_us == rhs.initialization_time_us) &&
      (lhs.inference_time_us == rhs.inference_time_us) &&
      (lhs.max_memory_kb == rhs.max_memory_kb) &&
      (lhs.ok == rhs.ok) &&
      (lhs.metrics == rhs.metrics);
}

inline bool operator!=(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs) {
    return !(lhs == rhs);
}


// Deep copy constructor: clones each owned BenchmarkMetricT element
// (null elements stay null).
inline BenchmarkResultT::BenchmarkResultT(const BenchmarkResultT &o)
      : initialization_time_us(o.initialization_time_us),
        inference_time_us(o.inference_time_us),
        max_memory_kb(o.max_memory_kb),
        ok(o.ok) {
  metrics.reserve(o.metrics.size());
  for (const auto &v : o.metrics) { metrics.emplace_back((v) ? new tflite::BenchmarkMetricT(*v) : nullptr); }
}

// Copy-and-swap assignment: `o` arrives by value, so member-wise swap is
// exception-safe and self-assignment-safe.
inline BenchmarkResultT &BenchmarkResultT::operator=(BenchmarkResultT o) FLATBUFFERS_NOEXCEPT {
  std::swap(initialization_time_us, o.initialization_time_us);
  std::swap(inference_time_us, o.inference_time_us);
  std::swap(max_memory_kb, o.max_memory_kb);
  std::swap(ok, o.ok);
  std::swap(metrics, o.metrics);
  return *this;
}

// Allocates a new unpacked object filled from this table; caller owns it.
inline BenchmarkResultT *BenchmarkResult::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkResultT>(new BenchmarkResultT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies every field of this table into `_o`. Metric sub-tables are unpacked
// in place when `_o` already holds objects, otherwise freshly allocated.
inline void BenchmarkResult::UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = initialization_time_us(); if (_e) { _o->initialization_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->initialization_time_us[_i] = _e->Get(_i); } } }
  { auto _e = inference_time_us(); if (_e) { _o->inference_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inference_time_us[_i] = _e->Get(_i); } } }
  { auto _e = max_memory_kb(); _o->max_memory_kb = _e; }
  { auto _e = ok(); _o->ok = _e; }
  { auto _e = metrics(); if (_e) { _o->metrics.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->metrics[_i]) { _e->Get(_i)->UnPackTo(_o->metrics[_i].get(), _resolver); } else { _o->metrics[_i] = std::unique_ptr<tflite::BenchmarkMetricT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
}

// Convenience wrapper: serialize an unpacked object via CreateBenchmarkResult.
inline flatbuffers::Offset<BenchmarkResult> BenchmarkResult::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkResult(_fbb, _o, _rehasher);
}

// Builds a BenchmarkResult table from `_o`. Child offsets (scalar vectors and
// the metrics table vector) are created before the parent table is assembled;
// empty ones encode as offset 0.
inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // _va hands builder/object/rehasher to the metrics vector-element lambda.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkResultT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _initialization_time_us = _o->initialization_time_us.size() ? _fbb.CreateVector(_o->initialization_time_us) : 0;
  auto _inference_time_us = _o->inference_time_us.size() ? _fbb.CreateVector(_o->inference_time_us) : 0;
  auto _max_memory_kb = _o->max_memory_kb;
  auto _ok = _o->ok;
  auto _metrics = _o->metrics.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>> (_o->metrics.size(), [](size_t i, _VectorArgs *__va) { return CreateBenchmarkMetric(*__va->__fbb, __va->__o->metrics[i].get(), __va->__rehasher); }, &_va ) : 0;
  return tflite::CreateBenchmarkResult(
      _fbb,
      _initialization_time_us,
      _inference_time_us,
      _max_memory_kb,
      _ok,
      _metrics);
}
4145 
4146 
// Member-wise equality for the native ErrorCode object (all scalar fields).
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const ErrorCodeT &lhs, const ErrorCodeT &rhs) {
  return
      (lhs.source == rhs.source) &&
      (lhs.tflite_error == rhs.tflite_error) &&
      (lhs.underlying_api_error == rhs.underlying_api_error);
}

inline bool operator!=(const ErrorCodeT &lhs, const ErrorCodeT &rhs) {
    return !(lhs == rhs);
}


// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline ErrorCodeT *ErrorCode::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ErrorCodeT>(new ErrorCodeT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies each scalar field from the flatbuffer table into *_o.
inline void ErrorCode::UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = source(); _o->source = _e; }
  { auto _e = tflite_error(); _o->tflite_error = _e; }
  { auto _e = underlying_api_error(); _o->underlying_api_error = _e; }
}

// Serializes a native ErrorCodeT back into _fbb; thin wrapper over CreateErrorCode.
inline flatbuffers::Offset<ErrorCode> ErrorCode::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateErrorCode(_fbb, _o, _rehasher);
}

// Builds an ErrorCode table from the native object's scalar fields.
inline flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ErrorCodeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _source = _o->source;
  auto _tflite_error = _o->tflite_error;
  auto _underlying_api_error = _o->underlying_api_error;
  return tflite::CreateErrorCode(
      _fbb,
      _source,
      _tflite_error,
      _underlying_api_error);
}
4190 
4191 
// Member-wise equality for the native BenchmarkError object. Note that the
// `error_code` vector of unique_ptrs is compared with std::vector::operator==,
// i.e. by pointer value per element, not by pointee contents.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs) {
  return
      (lhs.stage == rhs.stage) &&
      (lhs.exit_code == rhs.exit_code) &&
      (lhs.signal == rhs.signal) &&
      (lhs.error_code == rhs.error_code) &&
      (lhs.mini_benchmark_error_code == rhs.mini_benchmark_error_code);
}

inline bool operator!=(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs) {
    return !(lhs == rhs);
}


// Deep-copy constructor: scalars are copied in the init list; each owned
// ErrorCodeT in `error_code` is cloned (null slots stay null).
inline BenchmarkErrorT::BenchmarkErrorT(const BenchmarkErrorT &o)
      : stage(o.stage),
        exit_code(o.exit_code),
        signal(o.signal),
        mini_benchmark_error_code(o.mini_benchmark_error_code) {
  error_code.reserve(o.error_code.size());
  for (const auto &v : o.error_code) { error_code.emplace_back((v) ? new tflite::ErrorCodeT(*v) : nullptr); }
}

// Copy-and-swap assignment (by-value parameter), self-assignment-safe.
inline BenchmarkErrorT &BenchmarkErrorT::operator=(BenchmarkErrorT o) FLATBUFFERS_NOEXCEPT {
  std::swap(stage, o.stage);
  std::swap(exit_code, o.exit_code);
  std::swap(signal, o.signal);
  std::swap(error_code, o.error_code);
  std::swap(mini_benchmark_error_code, o.mini_benchmark_error_code);
  return *this;
}

// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline BenchmarkErrorT *BenchmarkError::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkErrorT>(new BenchmarkErrorT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; the `error_code` table vector reuses existing
// native objects and allocates only where a slot is null.
inline void BenchmarkError::UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = stage(); _o->stage = _e; }
  { auto _e = exit_code(); _o->exit_code = _e; }
  { auto _e = signal(); _o->signal = _e; }
  { auto _e = error_code(); if (_e) { _o->error_code.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->error_code[_i]) { _e->Get(_i)->UnPackTo(_o->error_code[_i].get(), _resolver); } else { _o->error_code[_i] = std::unique_ptr<tflite::ErrorCodeT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
  { auto _e = mini_benchmark_error_code(); _o->mini_benchmark_error_code = _e; }
}

// Serializes a native BenchmarkErrorT back into _fbb.
inline flatbuffers::Offset<BenchmarkError> BenchmarkError::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkError(_fbb, _o, _rehasher);
}

// Builds a BenchmarkError table; an empty `error_code` vector yields a 0
// offset (field absent from the buffer).
inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkErrorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _stage = _o->stage;
  auto _exit_code = _o->exit_code;
  auto _signal = _o->signal;
  auto _error_code = _o->error_code.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>> (_o->error_code.size(), [](size_t i, _VectorArgs *__va) { return CreateErrorCode(*__va->__fbb, __va->__o->error_code[i].get(), __va->__rehasher); }, &_va ) : 0;
  auto _mini_benchmark_error_code = _o->mini_benchmark_error_code;
  return tflite::CreateBenchmarkError(
      _fbb,
      _stage,
      _exit_code,
      _signal,
      _error_code,
      _mini_benchmark_error_code);
}
4261 
4262 
// Member-wise equality for the native BenchmarkEvent object. Owned sub-object
// pointers compare equal when both are null, the same pointer, or both
// non-null with equal pointees (deep comparison).
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs) {
  return
      ((lhs.tflite_settings == rhs.tflite_settings) || (lhs.tflite_settings && rhs.tflite_settings && *lhs.tflite_settings == *rhs.tflite_settings)) &&
      (lhs.event_type == rhs.event_type) &&
      ((lhs.result == rhs.result) || (lhs.result && rhs.result && *lhs.result == *rhs.result)) &&
      ((lhs.error == rhs.error) || (lhs.error && rhs.error && *lhs.error == *rhs.error)) &&
      (lhs.boottime_us == rhs.boottime_us) &&
      (lhs.wallclock_us == rhs.wallclock_us);
}

inline bool operator!=(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs) {
    return !(lhs == rhs);
}


// Deep-copy constructor: clones each owned sub-object when present, keeps
// null when absent; scalars are copied directly.
inline BenchmarkEventT::BenchmarkEventT(const BenchmarkEventT &o)
      : tflite_settings((o.tflite_settings) ? new tflite::TFLiteSettingsT(*o.tflite_settings) : nullptr),
        event_type(o.event_type),
        result((o.result) ? new tflite::BenchmarkResultT(*o.result) : nullptr),
        error((o.error) ? new tflite::BenchmarkErrorT(*o.error) : nullptr),
        boottime_us(o.boottime_us),
        wallclock_us(o.wallclock_us) {
}

// Copy-and-swap assignment (by-value parameter), self-assignment-safe.
inline BenchmarkEventT &BenchmarkEventT::operator=(BenchmarkEventT o) FLATBUFFERS_NOEXCEPT {
  std::swap(tflite_settings, o.tflite_settings);
  std::swap(event_type, o.event_type);
  std::swap(result, o.result);
  std::swap(error, o.error);
  std::swap(boottime_us, o.boottime_us);
  std::swap(wallclock_us, o.wallclock_us);
  return *this;
}

// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline BenchmarkEventT *BenchmarkEvent::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkEventT>(new BenchmarkEventT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; nested tables reuse an existing native
// object when one is already allocated, otherwise unpack into a fresh one.
inline void BenchmarkEvent::UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = tflite_settings(); if (_e) { if(_o->tflite_settings) { _e->UnPackTo(_o->tflite_settings.get(), _resolver); } else { _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); } } }
  { auto _e = event_type(); _o->event_type = _e; }
  { auto _e = result(); if (_e) { if(_o->result) { _e->UnPackTo(_o->result.get(), _resolver); } else { _o->result = std::unique_ptr<tflite::BenchmarkResultT>(_e->UnPack(_resolver)); } } }
  { auto _e = error(); if (_e) { if(_o->error) { _e->UnPackTo(_o->error.get(), _resolver); } else { _o->error = std::unique_ptr<tflite::BenchmarkErrorT>(_e->UnPack(_resolver)); } } }
  { auto _e = boottime_us(); _o->boottime_us = _e; }
  { auto _e = wallclock_us(); _o->wallclock_us = _e; }
}

// Serializes a native BenchmarkEventT back into _fbb.
inline flatbuffers::Offset<BenchmarkEvent> BenchmarkEvent::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkEvent(_fbb, _o, _rehasher);
}

// Builds a BenchmarkEvent table; null sub-objects yield a 0 offset (field
// absent from the buffer).
inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkEventT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
  auto _event_type = _o->event_type;
  auto _result = _o->result ? CreateBenchmarkResult(_fbb, _o->result.get(), _rehasher) : 0;
  auto _error = _o->error ? CreateBenchmarkError(_fbb, _o->error.get(), _rehasher) : 0;
  auto _boottime_us = _o->boottime_us;
  auto _wallclock_us = _o->wallclock_us;
  return tflite::CreateBenchmarkEvent(
      _fbb,
      _tflite_settings,
      _event_type,
      _result,
      _error,
      _boottime_us,
      _wallclock_us);
}
4337 
4338 
// Member-wise equality; `min_latency_event` is deep-compared when both sides
// are non-null, equal when both null or the same pointer.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs) {
  return
      (lhs.number_of_source_events == rhs.number_of_source_events) &&
      ((lhs.min_latency_event == rhs.min_latency_event) || (lhs.min_latency_event && rhs.min_latency_event && *lhs.min_latency_event == *rhs.min_latency_event)) &&
      (lhs.min_inference_time_us == rhs.min_inference_time_us);
}

inline bool operator!=(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs) {
    return !(lhs == rhs);
}


// Deep-copy constructor: clones the owned BenchmarkEventT when present.
inline BestAccelerationDecisionT::BestAccelerationDecisionT(const BestAccelerationDecisionT &o)
      : number_of_source_events(o.number_of_source_events),
        min_latency_event((o.min_latency_event) ? new tflite::BenchmarkEventT(*o.min_latency_event) : nullptr),
        min_inference_time_us(o.min_inference_time_us) {
}

// Copy-and-swap assignment (by-value parameter), self-assignment-safe.
inline BestAccelerationDecisionT &BestAccelerationDecisionT::operator=(BestAccelerationDecisionT o) FLATBUFFERS_NOEXCEPT {
  std::swap(number_of_source_events, o.number_of_source_events);
  std::swap(min_latency_event, o.min_latency_event);
  std::swap(min_inference_time_us, o.min_inference_time_us);
  return *this;
}

// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline BestAccelerationDecisionT *BestAccelerationDecision::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BestAccelerationDecisionT>(new BestAccelerationDecisionT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; the nested event reuses an existing native
// object when one is already allocated.
inline void BestAccelerationDecision::UnPackTo(BestAccelerationDecisionT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = number_of_source_events(); _o->number_of_source_events = _e; }
  { auto _e = min_latency_event(); if (_e) { if(_o->min_latency_event) { _e->UnPackTo(_o->min_latency_event.get(), _resolver); } else { _o->min_latency_event = std::unique_ptr<tflite::BenchmarkEventT>(_e->UnPack(_resolver)); } } }
  { auto _e = min_inference_time_us(); _o->min_inference_time_us = _e; }
}

// Serializes a native BestAccelerationDecisionT back into _fbb.
inline flatbuffers::Offset<BestAccelerationDecision> BestAccelerationDecision::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBestAccelerationDecision(_fbb, _o, _rehasher);
}

// Builds a BestAccelerationDecision table; a null event yields a 0 offset.
inline flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BestAccelerationDecisionT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _number_of_source_events = _o->number_of_source_events;
  auto _min_latency_event = _o->min_latency_event ? CreateBenchmarkEvent(_fbb, _o->min_latency_event.get(), _rehasher) : 0;
  auto _min_inference_time_us = _o->min_inference_time_us;
  return tflite::CreateBestAccelerationDecision(
      _fbb,
      _number_of_source_events,
      _min_latency_event,
      _min_inference_time_us);
}
4395 
4396 
// Equality on the single scalar field of the native object.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs) {
  return
      (lhs.initialization_status == rhs.initialization_status);
}

inline bool operator!=(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs) {
    return !(lhs == rhs);
}


// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline BenchmarkInitializationFailureT *BenchmarkInitializationFailure::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkInitializationFailureT>(new BenchmarkInitializationFailureT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies the single scalar field from the flatbuffer table into *_o.
inline void BenchmarkInitializationFailure::UnPackTo(BenchmarkInitializationFailureT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = initialization_status(); _o->initialization_status = _e; }
}

// Serializes a native BenchmarkInitializationFailureT back into _fbb.
inline flatbuffers::Offset<BenchmarkInitializationFailure> BenchmarkInitializationFailure::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkInitializationFailure(_fbb, _o, _rehasher);
}

// Builds a BenchmarkInitializationFailure table from the native object.
inline flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkInitializationFailureT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _initialization_status = _o->initialization_status;
  return tflite::CreateBenchmarkInitializationFailure(
      _fbb,
      _initialization_status);
}
4432 
4433 
// Member-wise equality; each owned sub-object is deep-compared when both
// sides are non-null, equal when both null or the same pointer.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs) {
  return
      (lhs.is_log_flushing_event == rhs.is_log_flushing_event) &&
      ((lhs.best_acceleration_decision == rhs.best_acceleration_decision) || (lhs.best_acceleration_decision && rhs.best_acceleration_decision && *lhs.best_acceleration_decision == *rhs.best_acceleration_decision)) &&
      ((lhs.initialization_failure == rhs.initialization_failure) || (lhs.initialization_failure && rhs.initialization_failure && *lhs.initialization_failure == *rhs.initialization_failure)) &&
      ((lhs.benchmark_event == rhs.benchmark_event) || (lhs.benchmark_event && rhs.benchmark_event && *lhs.benchmark_event == *rhs.benchmark_event));
}

inline bool operator!=(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs) {
    return !(lhs == rhs);
}


// Deep-copy constructor: clones each owned sub-object when present.
inline MiniBenchmarkEventT::MiniBenchmarkEventT(const MiniBenchmarkEventT &o)
      : is_log_flushing_event(o.is_log_flushing_event),
        best_acceleration_decision((o.best_acceleration_decision) ? new tflite::BestAccelerationDecisionT(*o.best_acceleration_decision) : nullptr),
        initialization_failure((o.initialization_failure) ? new tflite::BenchmarkInitializationFailureT(*o.initialization_failure) : nullptr),
        benchmark_event((o.benchmark_event) ? new tflite::BenchmarkEventT(*o.benchmark_event) : nullptr) {
}

// Copy-and-swap assignment (by-value parameter), self-assignment-safe.
inline MiniBenchmarkEventT &MiniBenchmarkEventT::operator=(MiniBenchmarkEventT o) FLATBUFFERS_NOEXCEPT {
  std::swap(is_log_flushing_event, o.is_log_flushing_event);
  std::swap(best_acceleration_decision, o.best_acceleration_decision);
  std::swap(initialization_failure, o.initialization_failure);
  std::swap(benchmark_event, o.benchmark_event);
  return *this;
}

// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline MiniBenchmarkEventT *MiniBenchmarkEvent::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<MiniBenchmarkEventT>(new MiniBenchmarkEventT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; nested tables reuse an existing native
// object when one is already allocated.
inline void MiniBenchmarkEvent::UnPackTo(MiniBenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = is_log_flushing_event(); _o->is_log_flushing_event = _e; }
  { auto _e = best_acceleration_decision(); if (_e) { if(_o->best_acceleration_decision) { _e->UnPackTo(_o->best_acceleration_decision.get(), _resolver); } else { _o->best_acceleration_decision = std::unique_ptr<tflite::BestAccelerationDecisionT>(_e->UnPack(_resolver)); } } }
  { auto _e = initialization_failure(); if (_e) { if(_o->initialization_failure) { _e->UnPackTo(_o->initialization_failure.get(), _resolver); } else { _o->initialization_failure = std::unique_ptr<tflite::BenchmarkInitializationFailureT>(_e->UnPack(_resolver)); } } }
  { auto _e = benchmark_event(); if (_e) { if(_o->benchmark_event) { _e->UnPackTo(_o->benchmark_event.get(), _resolver); } else { _o->benchmark_event = std::unique_ptr<tflite::BenchmarkEventT>(_e->UnPack(_resolver)); } } }
}

// Serializes a native MiniBenchmarkEventT back into _fbb.
inline flatbuffers::Offset<MiniBenchmarkEvent> MiniBenchmarkEvent::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateMiniBenchmarkEvent(_fbb, _o, _rehasher);
}

// Builds a MiniBenchmarkEvent table; null sub-objects yield a 0 offset.
inline flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MiniBenchmarkEventT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _is_log_flushing_event = _o->is_log_flushing_event;
  auto _best_acceleration_decision = _o->best_acceleration_decision ? CreateBestAccelerationDecision(_fbb, _o->best_acceleration_decision.get(), _rehasher) : 0;
  auto _initialization_failure = _o->initialization_failure ? CreateBenchmarkInitializationFailure(_fbb, _o->initialization_failure.get(), _rehasher) : 0;
  auto _benchmark_event = _o->benchmark_event ? CreateBenchmarkEvent(_fbb, _o->benchmark_event.get(), _rehasher) : 0;
  return tflite::CreateMiniBenchmarkEvent(
      _fbb,
      _is_log_flushing_event,
      _best_acceleration_decision,
      _initialization_failure,
      _benchmark_event);
}
4496 
4497 
// Member-wise equality for the native ModelFile object (string + scalars).
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const ModelFileT &lhs, const ModelFileT &rhs) {
  return
      (lhs.filename == rhs.filename) &&
      (lhs.fd == rhs.fd) &&
      (lhs.offset == rhs.offset) &&
      (lhs.length == rhs.length);
}

inline bool operator!=(const ModelFileT &lhs, const ModelFileT &rhs) {
    return !(lhs == rhs);
}


// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline ModelFileT *ModelFile::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ModelFileT>(new ModelFileT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; an absent filename leaves _o->filename untouched.
inline void ModelFile::UnPackTo(ModelFileT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = filename(); if (_e) _o->filename = _e->str(); }
  { auto _e = fd(); _o->fd = _e; }
  { auto _e = offset(); _o->offset = _e; }
  { auto _e = length(); _o->length = _e; }
}

// Serializes a native ModelFileT back into _fbb.
inline flatbuffers::Offset<ModelFile> ModelFile::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateModelFile(_fbb, _o, _rehasher);
}

// Builds a ModelFile table; an empty filename yields a 0 offset (string
// field absent from the buffer).
inline flatbuffers::Offset<ModelFile> CreateModelFile(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ModelFileT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _filename = _o->filename.empty() ? 0 : _fbb.CreateString(_o->filename);
  auto _fd = _o->fd;
  auto _offset = _o->offset;
  auto _length = _o->length;
  return tflite::CreateModelFile(
      _fbb,
      _filename,
      _fd,
      _offset,
      _length);
}
4545 
4546 
// Member-wise equality for the native BenchmarkStoragePaths object.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs) {
  return
      (lhs.storage_file_path == rhs.storage_file_path) &&
      (lhs.data_directory_path == rhs.data_directory_path);
}

inline bool operator!=(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs) {
    return !(lhs == rhs);
}


// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline BenchmarkStoragePathsT *BenchmarkStoragePaths::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BenchmarkStoragePathsT>(new BenchmarkStoragePathsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Copies both string fields into *_o; absent fields leave *_o untouched.
inline void BenchmarkStoragePaths::UnPackTo(BenchmarkStoragePathsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = storage_file_path(); if (_e) _o->storage_file_path = _e->str(); }
  { auto _e = data_directory_path(); if (_e) _o->data_directory_path = _e->str(); }
}

// Serializes a native BenchmarkStoragePathsT back into _fbb.
inline flatbuffers::Offset<BenchmarkStoragePaths> BenchmarkStoragePaths::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBenchmarkStoragePaths(_fbb, _o, _rehasher);
}

// Builds a BenchmarkStoragePaths table; empty strings yield 0 offsets
// (fields absent from the buffer).
inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkStoragePathsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _storage_file_path = _o->storage_file_path.empty() ? 0 : _fbb.CreateString(_o->storage_file_path);
  auto _data_directory_path = _o->data_directory_path.empty() ? 0 : _fbb.CreateString(_o->data_directory_path);
  return tflite::CreateBenchmarkStoragePaths(
      _fbb,
      _storage_file_path,
      _data_directory_path);
}
4586 
4587 
// Member-wise equality; `settings_to_test` (vector of unique_ptrs) is
// compared with std::vector::operator==, i.e. by pointer value per element,
// while the two owned sub-objects are deep-compared when both non-null.
// NOTE: flatc-generated code — regenerate from the .fbs schema instead of editing.
inline bool operator==(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs) {
  return
      (lhs.settings_to_test == rhs.settings_to_test) &&
      ((lhs.model_file == rhs.model_file) || (lhs.model_file && rhs.model_file && *lhs.model_file == *rhs.model_file)) &&
      ((lhs.storage_paths == rhs.storage_paths) || (lhs.storage_paths && rhs.storage_paths && *lhs.storage_paths == *rhs.storage_paths));
}

inline bool operator!=(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs) {
    return !(lhs == rhs);
}


// Deep-copy constructor: clones the owned sub-objects in the init list and
// each TFLiteSettingsT in `settings_to_test` in the body (null slots stay null).
inline MinibenchmarkSettingsT::MinibenchmarkSettingsT(const MinibenchmarkSettingsT &o)
      : model_file((o.model_file) ? new tflite::ModelFileT(*o.model_file) : nullptr),
        storage_paths((o.storage_paths) ? new tflite::BenchmarkStoragePathsT(*o.storage_paths) : nullptr) {
  settings_to_test.reserve(o.settings_to_test.size());
  for (const auto &v : o.settings_to_test) { settings_to_test.emplace_back((v) ? new tflite::TFLiteSettingsT(*v) : nullptr); }
}

// Copy-and-swap assignment (by-value parameter), self-assignment-safe.
inline MinibenchmarkSettingsT &MinibenchmarkSettingsT::operator=(MinibenchmarkSettingsT o) FLATBUFFERS_NOEXCEPT {
  std::swap(settings_to_test, o.settings_to_test);
  std::swap(model_file, o.model_file);
  std::swap(storage_paths, o.storage_paths);
  return *this;
}

// Unpacks this flatbuffer table into a newly allocated native object; the
// caller owns the returned pointer.
inline MinibenchmarkSettingsT *MinibenchmarkSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<MinibenchmarkSettingsT>(new MinibenchmarkSettingsT());
  UnPackTo(_o.get(), _resolver);
  return _o.release();
}

// Field-by-field copy into *_o; table vectors and nested tables reuse
// existing native objects and allocate only where a slot is null.
inline void MinibenchmarkSettings::UnPackTo(MinibenchmarkSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = settings_to_test(); if (_e) { _o->settings_to_test.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->settings_to_test[_i]) { _e->Get(_i)->UnPackTo(_o->settings_to_test[_i].get(), _resolver); } else { _o->settings_to_test[_i] = std::unique_ptr<tflite::TFLiteSettingsT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
  { auto _e = model_file(); if (_e) { if(_o->model_file) { _e->UnPackTo(_o->model_file.get(), _resolver); } else { _o->model_file = std::unique_ptr<tflite::ModelFileT>(_e->UnPack(_resolver)); } } }
  { auto _e = storage_paths(); if (_e) { if(_o->storage_paths) { _e->UnPackTo(_o->storage_paths.get(), _resolver); } else { _o->storage_paths = std::unique_ptr<tflite::BenchmarkStoragePathsT>(_e->UnPack(_resolver)); } } }
}

// Serializes a native MinibenchmarkSettingsT back into _fbb.
inline flatbuffers::Offset<MinibenchmarkSettings> MinibenchmarkSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateMinibenchmarkSettings(_fbb, _o, _rehasher);
}

// Builds a MinibenchmarkSettings table; empty vector / null sub-objects
// yield 0 offsets (fields absent from the buffer).
inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MinibenchmarkSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _settings_to_test = _o->settings_to_test.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::TFLiteSettings>> (_o->settings_to_test.size(), [](size_t i, _VectorArgs *__va) { return CreateTFLiteSettings(*__va->__fbb, __va->__o->settings_to_test[i].get(), __va->__rehasher); }, &_va ) : 0;
  auto _model_file = _o->model_file ? CreateModelFile(_fbb, _o->model_file.get(), _rehasher) : 0;
  auto _storage_paths = _o->storage_paths ? CreateBenchmarkStoragePaths(_fbb, _o->storage_paths.get(), _rehasher) : 0;
  return tflite::CreateMinibenchmarkSettings(
      _fbb,
      _settings_to_test,
      _model_file,
      _storage_paths);
}
4645 
4646 }  // namespace tflite
4647 
4648 #endif  // FLATBUFFERS_GENERATED_CONFIGURATION_TFLITE_H_
4649