• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
2 
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6 
7     http://www.apache.org/licenses/LICENSE-2.0
8 
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 // clang-format off
16 // automatically generated by the FlatBuffers compiler, do not modify
17 
18 #ifndef FLATBUFFERS_GENERATED_CONFIGURATIONFORGENERATION_TFLITE_H_
19 #define FLATBUFFERS_GENERATED_CONFIGURATIONFORGENERATION_TFLITE_H_
20 
21 #include "flatbuffers/flatbuffers.h"
22 
23 namespace tflite {
24 
// Forward declarations of the generated FlatBuffers table types and their
// object-API ("native") counterparts (the *T structs).
struct ComputeSettings;
struct ComputeSettingsT;

struct NNAPISettings;
struct NNAPISettingsT;

struct GPUSettings;
struct GPUSettingsT;

struct HexagonSettings;
struct HexagonSettingsT;

struct XNNPackSettings;
struct XNNPackSettingsT;

struct EdgeTpuDeviceSpec;
struct EdgeTpuDeviceSpecT;

struct EdgeTpuInactivePowerConfig;
struct EdgeTpuInactivePowerConfigT;

struct EdgeTpuSettings;
struct EdgeTpuSettingsT;

struct CoralSettings;
struct CoralSettingsT;

struct CPUSettings;
struct CPUSettingsT;

struct TFLiteSettings;
struct TFLiteSettingsT;

struct FallbackSettings;
struct FallbackSettingsT;

struct BenchmarkMetric;
struct BenchmarkMetricT;

struct BenchmarkResult;
struct BenchmarkResultT;

struct ErrorCode;
struct ErrorCodeT;

struct BenchmarkError;
struct BenchmarkErrorT;

struct BenchmarkEvent;
struct BenchmarkEventT;

struct BestAccelerationDecision;
struct BestAccelerationDecisionT;

struct BenchmarkInitializationFailure;
struct BenchmarkInitializationFailureT;

struct MiniBenchmarkEvent;
struct MiniBenchmarkEventT;

struct ModelFile;
struct ModelFileT;

struct BenchmarkStoragePaths;
struct BenchmarkStoragePathsT;

struct MinibenchmarkSettings;
struct MinibenchmarkSettingsT;

// Equality operators for the object-API types.
bool operator==(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs);
bool operator!=(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs);
bool operator==(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs);
bool operator!=(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs);
bool operator==(const GPUSettingsT &lhs, const GPUSettingsT &rhs);
bool operator!=(const GPUSettingsT &lhs, const GPUSettingsT &rhs);
bool operator==(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs);
bool operator!=(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs);
bool operator==(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs);
bool operator!=(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs);
bool operator==(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs);
bool operator!=(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs);
bool operator==(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs);
bool operator!=(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs);
bool operator==(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs);
bool operator!=(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs);
bool operator==(const CoralSettingsT &lhs, const CoralSettingsT &rhs);
bool operator!=(const CoralSettingsT &lhs, const CoralSettingsT &rhs);
bool operator==(const CPUSettingsT &lhs, const CPUSettingsT &rhs);
bool operator!=(const CPUSettingsT &lhs, const CPUSettingsT &rhs);
bool operator==(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs);
bool operator!=(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs);
bool operator==(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs);
bool operator!=(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs);
bool operator==(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs);
bool operator!=(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs);
bool operator==(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs);
bool operator!=(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs);
bool operator==(const ErrorCodeT &lhs, const ErrorCodeT &rhs);
bool operator!=(const ErrorCodeT &lhs, const ErrorCodeT &rhs);
bool operator==(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs);
bool operator!=(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs);
bool operator==(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs);
bool operator!=(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs);
bool operator==(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs);
bool operator!=(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs);
bool operator==(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs);
bool operator!=(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs);
bool operator==(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs);
bool operator!=(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs);
bool operator==(const ModelFileT &lhs, const ModelFileT &rhs);
bool operator!=(const ModelFileT &lhs, const ModelFileT &rhs);
bool operator==(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs);
bool operator!=(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs);
bool operator==(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs);
bool operator!=(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs);
140 
// Execution preference selectable by the client (used by
// ComputeSettings::preference). _MIN/_MAX bound the declared range.
enum ExecutionPreference {
  ExecutionPreference_ANY = 0,
  ExecutionPreference_LOW_LATENCY = 1,
  ExecutionPreference_LOW_POWER = 2,
  ExecutionPreference_FORCE_CPU = 3,
  ExecutionPreference_MIN = ExecutionPreference_ANY,
  ExecutionPreference_MAX = ExecutionPreference_FORCE_CPU
};

// All declared ExecutionPreference values, in declaration order.
inline const ExecutionPreference (&EnumValuesExecutionPreference())[4] {
  static const ExecutionPreference values[] = {
    ExecutionPreference_ANY,
    ExecutionPreference_LOW_LATENCY,
    ExecutionPreference_LOW_POWER,
    ExecutionPreference_FORCE_CPU
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesExecutionPreference() {
  static const char * const names[5] = {
    "ANY",
    "LOW_LATENCY",
    "LOW_POWER",
    "FORCE_CPU",
    nullptr
  };
  return names;
}
170 
EnumNameExecutionPreference(ExecutionPreference e)171 inline const char *EnumNameExecutionPreference(ExecutionPreference e) {
172   if (flatbuffers::IsOutRange(e, ExecutionPreference_ANY, ExecutionPreference_FORCE_CPU)) return "";
173   const size_t index = static_cast<size_t>(e);
174   return EnumNamesExecutionPreference()[index];
175 }
176 
// Delegate selection for TFLiteSettings. _MIN/_MAX bound the declared range.
enum Delegate {
  Delegate_NONE = 0,
  Delegate_NNAPI = 1,
  Delegate_GPU = 2,
  Delegate_HEXAGON = 3,
  Delegate_XNNPACK = 4,
  Delegate_EDGETPU = 5,
  Delegate_EDGETPU_CORAL = 6,
  Delegate_MIN = Delegate_NONE,
  Delegate_MAX = Delegate_EDGETPU_CORAL
};

// All declared Delegate values, in declaration order.
inline const Delegate (&EnumValuesDelegate())[7] {
  static const Delegate values[] = {
    Delegate_NONE,
    Delegate_NNAPI,
    Delegate_GPU,
    Delegate_HEXAGON,
    Delegate_XNNPACK,
    Delegate_EDGETPU,
    Delegate_EDGETPU_CORAL
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesDelegate() {
  static const char * const names[8] = {
    "NONE",
    "NNAPI",
    "GPU",
    "HEXAGON",
    "XNNPACK",
    "EDGETPU",
    "EDGETPU_CORAL",
    nullptr
  };
  return names;
}
215 
EnumNameDelegate(Delegate e)216 inline const char *EnumNameDelegate(Delegate e) {
217   if (flatbuffers::IsOutRange(e, Delegate_NONE, Delegate_EDGETPU_CORAL)) return "";
218   const size_t index = static_cast<size_t>(e);
219   return EnumNamesDelegate()[index];
220 }
221 
// NNAPI execution preference (used by NNAPISettings::execution_preference).
enum NNAPIExecutionPreference {
  NNAPIExecutionPreference_UNDEFINED = 0,
  NNAPIExecutionPreference_NNAPI_LOW_POWER = 1,
  NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER = 2,
  NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED = 3,
  NNAPIExecutionPreference_MIN = NNAPIExecutionPreference_UNDEFINED,
  NNAPIExecutionPreference_MAX = NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
};

// All declared NNAPIExecutionPreference values, in declaration order.
inline const NNAPIExecutionPreference (&EnumValuesNNAPIExecutionPreference())[4] {
  static const NNAPIExecutionPreference values[] = {
    NNAPIExecutionPreference_UNDEFINED,
    NNAPIExecutionPreference_NNAPI_LOW_POWER,
    NNAPIExecutionPreference_NNAPI_FAST_SINGLE_ANSWER,
    NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesNNAPIExecutionPreference() {
  static const char * const names[5] = {
    "UNDEFINED",
    "NNAPI_LOW_POWER",
    "NNAPI_FAST_SINGLE_ANSWER",
    "NNAPI_SUSTAINED_SPEED",
    nullptr
  };
  return names;
}
251 
EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e)252 inline const char *EnumNameNNAPIExecutionPreference(NNAPIExecutionPreference e) {
253   if (flatbuffers::IsOutRange(e, NNAPIExecutionPreference_UNDEFINED, NNAPIExecutionPreference_NNAPI_SUSTAINED_SPEED)) return "";
254   const size_t index = static_cast<size_t>(e);
255   return EnumNamesNNAPIExecutionPreference()[index];
256 }
257 
// NNAPI execution priority (used by NNAPISettings::execution_priority).
enum NNAPIExecutionPriority {
  NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED = 0,
  NNAPIExecutionPriority_NNAPI_PRIORITY_LOW = 1,
  NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM = 2,
  NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH = 3,
  NNAPIExecutionPriority_MIN = NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
  NNAPIExecutionPriority_MAX = NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
};

// All declared NNAPIExecutionPriority values, in declaration order.
inline const NNAPIExecutionPriority (&EnumValuesNNAPIExecutionPriority())[4] {
  static const NNAPIExecutionPriority values[] = {
    NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
    NNAPIExecutionPriority_NNAPI_PRIORITY_LOW,
    NNAPIExecutionPriority_NNAPI_PRIORITY_MEDIUM,
    NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesNNAPIExecutionPriority() {
  static const char * const names[5] = {
    "NNAPI_PRIORITY_UNDEFINED",
    "NNAPI_PRIORITY_LOW",
    "NNAPI_PRIORITY_MEDIUM",
    "NNAPI_PRIORITY_HIGH",
    nullptr
  };
  return names;
}
287 
EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e)288 inline const char *EnumNameNNAPIExecutionPriority(NNAPIExecutionPriority e) {
289   if (flatbuffers::IsOutRange(e, NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED, NNAPIExecutionPriority_NNAPI_PRIORITY_HIGH)) return "";
290   const size_t index = static_cast<size_t>(e);
291   return EnumNamesNNAPIExecutionPriority()[index];
292 }
293 
// GPU delegate backend selector. _MIN/_MAX bound the declared range.
enum GPUBackend {
  GPUBackend_UNSET = 0,
  GPUBackend_OPENCL = 1,
  GPUBackend_OPENGL = 2,
  GPUBackend_MIN = GPUBackend_UNSET,
  GPUBackend_MAX = GPUBackend_OPENGL
};

// All declared GPUBackend values, in declaration order.
inline const GPUBackend (&EnumValuesGPUBackend())[3] {
  static const GPUBackend values[] = {
    GPUBackend_UNSET,
    GPUBackend_OPENCL,
    GPUBackend_OPENGL
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesGPUBackend() {
  static const char * const names[4] = {
    "UNSET",
    "OPENCL",
    "OPENGL",
    nullptr
  };
  return names;
}
320 
EnumNameGPUBackend(GPUBackend e)321 inline const char *EnumNameGPUBackend(GPUBackend e) {
322   if (flatbuffers::IsOutRange(e, GPUBackend_UNSET, GPUBackend_OPENGL)) return "";
323   const size_t index = static_cast<size_t>(e);
324   return EnumNamesGPUBackend()[index];
325 }
326 
// GPU delegate inference priority. _MIN/_MAX bound the declared range.
enum GPUInferencePriority {
  GPUInferencePriority_GPU_PRIORITY_AUTO = 0,
  GPUInferencePriority_GPU_PRIORITY_MAX_PRECISION = 1,
  GPUInferencePriority_GPU_PRIORITY_MIN_LATENCY = 2,
  GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE = 3,
  GPUInferencePriority_MIN = GPUInferencePriority_GPU_PRIORITY_AUTO,
  GPUInferencePriority_MAX = GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE
};

// All declared GPUInferencePriority values, in declaration order.
inline const GPUInferencePriority (&EnumValuesGPUInferencePriority())[4] {
  static const GPUInferencePriority values[] = {
    GPUInferencePriority_GPU_PRIORITY_AUTO,
    GPUInferencePriority_GPU_PRIORITY_MAX_PRECISION,
    GPUInferencePriority_GPU_PRIORITY_MIN_LATENCY,
    GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesGPUInferencePriority() {
  static const char * const names[5] = {
    "GPU_PRIORITY_AUTO",
    "GPU_PRIORITY_MAX_PRECISION",
    "GPU_PRIORITY_MIN_LATENCY",
    "GPU_PRIORITY_MIN_MEMORY_USAGE",
    nullptr
  };
  return names;
}
356 
EnumNameGPUInferencePriority(GPUInferencePriority e)357 inline const char *EnumNameGPUInferencePriority(GPUInferencePriority e) {
358   if (flatbuffers::IsOutRange(e, GPUInferencePriority_GPU_PRIORITY_AUTO, GPUInferencePriority_GPU_PRIORITY_MIN_MEMORY_USAGE)) return "";
359   const size_t index = static_cast<size_t>(e);
360   return EnumNamesGPUInferencePriority()[index];
361 }
362 
// GPU delegate inference usage hint. _MIN/_MAX bound the declared range.
enum GPUInferenceUsage {
  GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER = 0,
  GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED = 1,
  GPUInferenceUsage_MIN = GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
  GPUInferenceUsage_MAX = GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED
};

// All declared GPUInferenceUsage values, in declaration order.
inline const GPUInferenceUsage (&EnumValuesGPUInferenceUsage())[2] {
  static const GPUInferenceUsage values[] = {
    GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
    GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesGPUInferenceUsage() {
  static const char * const names[3] = {
    "GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER",
    "GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED",
    nullptr
  };
  return names;
}
386 
EnumNameGPUInferenceUsage(GPUInferenceUsage e)387 inline const char *EnumNameGPUInferenceUsage(GPUInferenceUsage e) {
388   if (flatbuffers::IsOutRange(e, GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER, GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_SUSTAINED_SPEED)) return "";
389   const size_t index = static_cast<size_t>(e);
390   return EnumNamesGPUInferenceUsage()[index];
391 }
392 
393 namespace EdgeTpuDeviceSpec_ {
394 
395 enum PlatformType {
396   PlatformType_MMIO = 0,
397   PlatformType_REFERENCE = 1,
398   PlatformType_SIMULATOR = 2,
399   PlatformType_REMOTE_SIMULATOR = 3,
400   PlatformType_MIN = PlatformType_MMIO,
401   PlatformType_MAX = PlatformType_REMOTE_SIMULATOR
402 };
403 
EnumValuesPlatformType()404 inline const PlatformType (&EnumValuesPlatformType())[4] {
405   static const PlatformType values[] = {
406     PlatformType_MMIO,
407     PlatformType_REFERENCE,
408     PlatformType_SIMULATOR,
409     PlatformType_REMOTE_SIMULATOR
410   };
411   return values;
412 }
413 
EnumNamesPlatformType()414 inline const char * const *EnumNamesPlatformType() {
415   static const char * const names[5] = {
416     "MMIO",
417     "REFERENCE",
418     "SIMULATOR",
419     "REMOTE_SIMULATOR",
420     nullptr
421   };
422   return names;
423 }
424 
EnumNamePlatformType(PlatformType e)425 inline const char *EnumNamePlatformType(PlatformType e) {
426   if (flatbuffers::IsOutRange(e, PlatformType_MMIO, PlatformType_REMOTE_SIMULATOR)) return "";
427   const size_t index = static_cast<size_t>(e);
428   return EnumNamesPlatformType()[index];
429 }
430 
431 }  // namespace EdgeTpuDeviceSpec_
432 
// EdgeTPU power states. _MIN/_MAX bound the declared range.
enum EdgeTpuPowerState {
  EdgeTpuPowerState_UNDEFINED_POWERSTATE = 0,
  EdgeTpuPowerState_TPU_CORE_OFF = 1,
  EdgeTpuPowerState_READY = 2,
  EdgeTpuPowerState_ACTIVE_MIN_POWER = 3,
  EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER = 4,
  EdgeTpuPowerState_ACTIVE_LOW_POWER = 5,
  EdgeTpuPowerState_ACTIVE = 6,
  EdgeTpuPowerState_OVER_DRIVE = 7,
  EdgeTpuPowerState_MIN = EdgeTpuPowerState_UNDEFINED_POWERSTATE,
  EdgeTpuPowerState_MAX = EdgeTpuPowerState_OVER_DRIVE
};

// All declared EdgeTpuPowerState values, in declaration order.
inline const EdgeTpuPowerState (&EnumValuesEdgeTpuPowerState())[8] {
  static const EdgeTpuPowerState values[] = {
    EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    EdgeTpuPowerState_TPU_CORE_OFF,
    EdgeTpuPowerState_READY,
    EdgeTpuPowerState_ACTIVE_MIN_POWER,
    EdgeTpuPowerState_ACTIVE_VERY_LOW_POWER,
    EdgeTpuPowerState_ACTIVE_LOW_POWER,
    EdgeTpuPowerState_ACTIVE,
    EdgeTpuPowerState_OVER_DRIVE
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesEdgeTpuPowerState() {
  static const char * const names[9] = {
    "UNDEFINED_POWERSTATE",
    "TPU_CORE_OFF",
    "READY",
    "ACTIVE_MIN_POWER",
    "ACTIVE_VERY_LOW_POWER",
    "ACTIVE_LOW_POWER",
    "ACTIVE",
    "OVER_DRIVE",
    nullptr
  };
  return names;
}
474 
EnumNameEdgeTpuPowerState(EdgeTpuPowerState e)475 inline const char *EnumNameEdgeTpuPowerState(EdgeTpuPowerState e) {
476   if (flatbuffers::IsOutRange(e, EdgeTpuPowerState_UNDEFINED_POWERSTATE, EdgeTpuPowerState_OVER_DRIVE)) return "";
477   const size_t index = static_cast<size_t>(e);
478   return EnumNamesEdgeTpuPowerState()[index];
479 }
480 
481 namespace EdgeTpuSettings_ {
482 
483 enum FloatTruncationType {
484   FloatTruncationType_UNSPECIFIED = 0,
485   FloatTruncationType_NO_TRUNCATION = 1,
486   FloatTruncationType_BFLOAT16 = 2,
487   FloatTruncationType_HALF = 3,
488   FloatTruncationType_MIN = FloatTruncationType_UNSPECIFIED,
489   FloatTruncationType_MAX = FloatTruncationType_HALF
490 };
491 
EnumValuesFloatTruncationType()492 inline const FloatTruncationType (&EnumValuesFloatTruncationType())[4] {
493   static const FloatTruncationType values[] = {
494     FloatTruncationType_UNSPECIFIED,
495     FloatTruncationType_NO_TRUNCATION,
496     FloatTruncationType_BFLOAT16,
497     FloatTruncationType_HALF
498   };
499   return values;
500 }
501 
EnumNamesFloatTruncationType()502 inline const char * const *EnumNamesFloatTruncationType() {
503   static const char * const names[5] = {
504     "UNSPECIFIED",
505     "NO_TRUNCATION",
506     "BFLOAT16",
507     "HALF",
508     nullptr
509   };
510   return names;
511 }
512 
EnumNameFloatTruncationType(FloatTruncationType e)513 inline const char *EnumNameFloatTruncationType(FloatTruncationType e) {
514   if (flatbuffers::IsOutRange(e, FloatTruncationType_UNSPECIFIED, FloatTruncationType_HALF)) return "";
515   const size_t index = static_cast<size_t>(e);
516   return EnumNamesFloatTruncationType()[index];
517 }
518 
519 }  // namespace EdgeTpuSettings_
520 
521 namespace CoralSettings_ {
522 
523 enum Performance {
524   Performance_UNDEFINED = 0,
525   Performance_MAXIMUM = 1,
526   Performance_HIGH = 2,
527   Performance_MEDIUM = 3,
528   Performance_LOW = 4,
529   Performance_MIN = Performance_UNDEFINED,
530   Performance_MAX = Performance_LOW
531 };
532 
EnumValuesPerformance()533 inline const Performance (&EnumValuesPerformance())[5] {
534   static const Performance values[] = {
535     Performance_UNDEFINED,
536     Performance_MAXIMUM,
537     Performance_HIGH,
538     Performance_MEDIUM,
539     Performance_LOW
540   };
541   return values;
542 }
543 
EnumNamesPerformance()544 inline const char * const *EnumNamesPerformance() {
545   static const char * const names[6] = {
546     "UNDEFINED",
547     "MAXIMUM",
548     "HIGH",
549     "MEDIUM",
550     "LOW",
551     nullptr
552   };
553   return names;
554 }
555 
EnumNamePerformance(Performance e)556 inline const char *EnumNamePerformance(Performance e) {
557   if (flatbuffers::IsOutRange(e, Performance_UNDEFINED, Performance_LOW)) return "";
558   const size_t index = static_cast<size_t>(e);
559   return EnumNamesPerformance()[index];
560 }
561 
562 }  // namespace CoralSettings_
563 
// Benchmark event type (used by BenchmarkEvent). _MIN/_MAX bound the range.
enum BenchmarkEventType {
  BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE = 0,
  BenchmarkEventType_START = 1,
  BenchmarkEventType_END = 2,
  BenchmarkEventType_ERROR = 3,
  BenchmarkEventType_LOGGED = 4,
  BenchmarkEventType_RECOVERED_ERROR = 5,
  BenchmarkEventType_MIN = BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
  BenchmarkEventType_MAX = BenchmarkEventType_RECOVERED_ERROR
};

// All declared BenchmarkEventType values, in declaration order.
inline const BenchmarkEventType (&EnumValuesBenchmarkEventType())[6] {
  static const BenchmarkEventType values[] = {
    BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
    BenchmarkEventType_START,
    BenchmarkEventType_END,
    BenchmarkEventType_ERROR,
    BenchmarkEventType_LOGGED,
    BenchmarkEventType_RECOVERED_ERROR
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesBenchmarkEventType() {
  static const char * const names[7] = {
    "UNDEFINED_BENCHMARK_EVENT_TYPE",
    "START",
    "END",
    "ERROR",
    "LOGGED",
    "RECOVERED_ERROR",
    nullptr
  };
  return names;
}
599 
EnumNameBenchmarkEventType(BenchmarkEventType e)600 inline const char *EnumNameBenchmarkEventType(BenchmarkEventType e) {
601   if (flatbuffers::IsOutRange(e, BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE, BenchmarkEventType_RECOVERED_ERROR)) return "";
602   const size_t index = static_cast<size_t>(e);
603   return EnumNamesBenchmarkEventType()[index];
604 }
605 
// Benchmark stage identifier. _MIN/_MAX bound the declared range.
enum BenchmarkStage {
  BenchmarkStage_UNKNOWN = 0,
  BenchmarkStage_INITIALIZATION = 1,
  BenchmarkStage_INFERENCE = 2,
  BenchmarkStage_MIN = BenchmarkStage_UNKNOWN,
  BenchmarkStage_MAX = BenchmarkStage_INFERENCE
};

// All declared BenchmarkStage values, in declaration order.
inline const BenchmarkStage (&EnumValuesBenchmarkStage())[3] {
  static const BenchmarkStage values[] = {
    BenchmarkStage_UNKNOWN,
    BenchmarkStage_INITIALIZATION,
    BenchmarkStage_INFERENCE
  };
  return values;
}

// Per-value name table, indexed by enum value and nullptr-terminated.
inline const char * const *EnumNamesBenchmarkStage() {
  static const char * const names[4] = {
    "UNKNOWN",
    "INITIALIZATION",
    "INFERENCE",
    nullptr
  };
  return names;
}
632 
EnumNameBenchmarkStage(BenchmarkStage e)633 inline const char *EnumNameBenchmarkStage(BenchmarkStage e) {
634   if (flatbuffers::IsOutRange(e, BenchmarkStage_UNKNOWN, BenchmarkStage_INFERENCE)) return "";
635   const size_t index = static_cast<size_t>(e);
636   return EnumNamesBenchmarkStage()[index];
637 }
638 
639 struct ComputeSettingsT : public flatbuffers::NativeTable {
640   typedef ComputeSettings TableType;
641   tflite::ExecutionPreference preference;
642   std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings;
643   std::string model_namespace_for_statistics;
644   std::string model_identifier_for_statistics;
645   std::unique_ptr<tflite::MinibenchmarkSettingsT> settings_to_test_locally;
ComputeSettingsTComputeSettingsT646   ComputeSettingsT()
647       : preference(tflite::ExecutionPreference_ANY) {
648   }
649 };
650 
651 struct ComputeSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
652   typedef ComputeSettingsT NativeTableType;
653   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
654     VT_PREFERENCE = 4,
655     VT_TFLITE_SETTINGS = 6,
656     VT_MODEL_NAMESPACE_FOR_STATISTICS = 8,
657     VT_MODEL_IDENTIFIER_FOR_STATISTICS = 10,
658     VT_SETTINGS_TO_TEST_LOCALLY = 12
659   };
preferenceFLATBUFFERS_FINAL_CLASS660   tflite::ExecutionPreference preference() const {
661     return static_cast<tflite::ExecutionPreference>(GetField<int32_t>(VT_PREFERENCE, 0));
662   }
tflite_settingsFLATBUFFERS_FINAL_CLASS663   const tflite::TFLiteSettings *tflite_settings() const {
664     return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
665   }
model_namespace_for_statisticsFLATBUFFERS_FINAL_CLASS666   const flatbuffers::String *model_namespace_for_statistics() const {
667     return GetPointer<const flatbuffers::String *>(VT_MODEL_NAMESPACE_FOR_STATISTICS);
668   }
model_identifier_for_statisticsFLATBUFFERS_FINAL_CLASS669   const flatbuffers::String *model_identifier_for_statistics() const {
670     return GetPointer<const flatbuffers::String *>(VT_MODEL_IDENTIFIER_FOR_STATISTICS);
671   }
settings_to_test_locallyFLATBUFFERS_FINAL_CLASS672   const tflite::MinibenchmarkSettings *settings_to_test_locally() const {
673     return GetPointer<const tflite::MinibenchmarkSettings *>(VT_SETTINGS_TO_TEST_LOCALLY);
674   }
VerifyFLATBUFFERS_FINAL_CLASS675   bool Verify(flatbuffers::Verifier &verifier) const {
676     return VerifyTableStart(verifier) &&
677            VerifyField<int32_t>(verifier, VT_PREFERENCE) &&
678            VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
679            verifier.VerifyTable(tflite_settings()) &&
680            VerifyOffset(verifier, VT_MODEL_NAMESPACE_FOR_STATISTICS) &&
681            verifier.VerifyString(model_namespace_for_statistics()) &&
682            VerifyOffset(verifier, VT_MODEL_IDENTIFIER_FOR_STATISTICS) &&
683            verifier.VerifyString(model_identifier_for_statistics()) &&
684            VerifyOffset(verifier, VT_SETTINGS_TO_TEST_LOCALLY) &&
685            verifier.VerifyTable(settings_to_test_locally()) &&
686            verifier.EndTable();
687   }
688   ComputeSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
689   void UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
690   static flatbuffers::Offset<ComputeSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
691 };
692 
693 struct ComputeSettingsBuilder {
694   flatbuffers::FlatBufferBuilder &fbb_;
695   flatbuffers::uoffset_t start_;
add_preferenceComputeSettingsBuilder696   void add_preference(tflite::ExecutionPreference preference) {
697     fbb_.AddElement<int32_t>(ComputeSettings::VT_PREFERENCE, static_cast<int32_t>(preference), 0);
698   }
add_tflite_settingsComputeSettingsBuilder699   void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
700     fbb_.AddOffset(ComputeSettings::VT_TFLITE_SETTINGS, tflite_settings);
701   }
add_model_namespace_for_statisticsComputeSettingsBuilder702   void add_model_namespace_for_statistics(flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics) {
703     fbb_.AddOffset(ComputeSettings::VT_MODEL_NAMESPACE_FOR_STATISTICS, model_namespace_for_statistics);
704   }
add_model_identifier_for_statisticsComputeSettingsBuilder705   void add_model_identifier_for_statistics(flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics) {
706     fbb_.AddOffset(ComputeSettings::VT_MODEL_IDENTIFIER_FOR_STATISTICS, model_identifier_for_statistics);
707   }
add_settings_to_test_locallyComputeSettingsBuilder708   void add_settings_to_test_locally(flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally) {
709     fbb_.AddOffset(ComputeSettings::VT_SETTINGS_TO_TEST_LOCALLY, settings_to_test_locally);
710   }
ComputeSettingsBuilderComputeSettingsBuilder711   explicit ComputeSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
712         : fbb_(_fbb) {
713     start_ = fbb_.StartTable();
714   }
715   ComputeSettingsBuilder &operator=(const ComputeSettingsBuilder &);
FinishComputeSettingsBuilder716   flatbuffers::Offset<ComputeSettings> Finish() {
717     const auto end = fbb_.EndTable(start_);
718     auto o = flatbuffers::Offset<ComputeSettings>(end);
719     return o;
720   }
721 };
722 
723 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(
724     flatbuffers::FlatBufferBuilder &_fbb,
725     tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
726     flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
727     flatbuffers::Offset<flatbuffers::String> model_namespace_for_statistics = 0,
728     flatbuffers::Offset<flatbuffers::String> model_identifier_for_statistics = 0,
729     flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally = 0) {
730   ComputeSettingsBuilder builder_(_fbb);
731   builder_.add_settings_to_test_locally(settings_to_test_locally);
732   builder_.add_model_identifier_for_statistics(model_identifier_for_statistics);
733   builder_.add_model_namespace_for_statistics(model_namespace_for_statistics);
734   builder_.add_tflite_settings(tflite_settings);
735   builder_.add_preference(preference);
736   return builder_.Finish();
737 }
738 
739 inline flatbuffers::Offset<ComputeSettings> CreateComputeSettingsDirect(
740     flatbuffers::FlatBufferBuilder &_fbb,
741     tflite::ExecutionPreference preference = tflite::ExecutionPreference_ANY,
742     flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
743     const char *model_namespace_for_statistics = nullptr,
744     const char *model_identifier_for_statistics = nullptr,
745     flatbuffers::Offset<tflite::MinibenchmarkSettings> settings_to_test_locally = 0) {
746   auto model_namespace_for_statistics__ = model_namespace_for_statistics ? _fbb.CreateString(model_namespace_for_statistics) : 0;
747   auto model_identifier_for_statistics__ = model_identifier_for_statistics ? _fbb.CreateString(model_identifier_for_statistics) : 0;
748   return tflite::CreateComputeSettings(
749       _fbb,
750       preference,
751       tflite_settings,
752       model_namespace_for_statistics__,
753       model_identifier_for_statistics__,
754       settings_to_test_locally);
755 }
756 
757 flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
758 
// Native (object API) mirror of the NNAPISettings table: owns its strings and
// the nested FallbackSettingsT, with scalars initialized to the schema
// defaults by the constructor below.
struct NNAPISettingsT : public flatbuffers::NativeTable {
  typedef NNAPISettings TableType;
  std::string accelerator_name;
  std::string cache_directory;
  std::string model_token;
  tflite::NNAPIExecutionPreference execution_preference;
  int32_t no_of_nnapi_instances_to_cache;
  std::unique_ptr<tflite::FallbackSettingsT> fallback_settings;
  bool allow_nnapi_cpu_on_android_10_plus;
  tflite::NNAPIExecutionPriority execution_priority;
  bool allow_dynamic_dimensions;
  bool allow_fp16_precision_for_fp32;
  bool use_burst_computation;
  // Initializes every scalar field to its schema default value.
  NNAPISettingsT()
      : execution_preference(tflite::NNAPIExecutionPreference_UNDEFINED),
        no_of_nnapi_instances_to_cache(0),
        allow_nnapi_cpu_on_android_10_plus(false),
        execution_priority(tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED),
        allow_dynamic_dimensions(false),
        allow_fp16_precision_for_fp32(false),
        use_burst_computation(false) {
  }
};
782 
// Flat (in-buffer) accessor for the NNAPISettings table. Field getters read
// directly from the serialized buffer; pointer-valued fields return nullptr
// when absent, scalar getters fall back to the schema default.
struct NNAPISettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef NNAPISettingsT NativeTableType;
  // Byte offsets of each field's slot in the table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_ACCELERATOR_NAME = 4,
    VT_CACHE_DIRECTORY = 6,
    VT_MODEL_TOKEN = 8,
    VT_EXECUTION_PREFERENCE = 10,
    VT_NO_OF_NNAPI_INSTANCES_TO_CACHE = 12,
    VT_FALLBACK_SETTINGS = 14,
    VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS = 16,
    VT_EXECUTION_PRIORITY = 18,
    VT_ALLOW_DYNAMIC_DIMENSIONS = 20,
    VT_ALLOW_FP16_PRECISION_FOR_FP32 = 22,
    VT_USE_BURST_COMPUTATION = 24
  };
  const flatbuffers::String *accelerator_name() const {
    return GetPointer<const flatbuffers::String *>(VT_ACCELERATOR_NAME);
  }
  const flatbuffers::String *cache_directory() const {
    return GetPointer<const flatbuffers::String *>(VT_CACHE_DIRECTORY);
  }
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  tflite::NNAPIExecutionPreference execution_preference() const {
    return static_cast<tflite::NNAPIExecutionPreference>(GetField<int32_t>(VT_EXECUTION_PREFERENCE, 0));
  }
  int32_t no_of_nnapi_instances_to_cache() const {
    return GetField<int32_t>(VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, 0);
  }
  const tflite::FallbackSettings *fallback_settings() const {
    return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
  }
  bool allow_nnapi_cpu_on_android_10_plus() const {
    return GetField<uint8_t>(VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, 0) != 0;
  }
  tflite::NNAPIExecutionPriority execution_priority() const {
    return static_cast<tflite::NNAPIExecutionPriority>(GetField<int32_t>(VT_EXECUTION_PRIORITY, 0));
  }
  bool allow_dynamic_dimensions() const {
    return GetField<uint8_t>(VT_ALLOW_DYNAMIC_DIMENSIONS, 0) != 0;
  }
  bool allow_fp16_precision_for_fp32() const {
    return GetField<uint8_t>(VT_ALLOW_FP16_PRECISION_FOR_FP32, 0) != 0;
  }
  bool use_burst_computation() const {
    return GetField<uint8_t>(VT_USE_BURST_COMPUTATION, 0) != 0;
  }
  // Structural validation of this table against the buffer bounds; strings
  // and the nested fallback_settings table are verified recursively.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_ACCELERATOR_NAME) &&
           verifier.VerifyString(accelerator_name()) &&
           VerifyOffset(verifier, VT_CACHE_DIRECTORY) &&
           verifier.VerifyString(cache_directory()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           VerifyField<int32_t>(verifier, VT_EXECUTION_PREFERENCE) &&
           VerifyField<int32_t>(verifier, VT_NO_OF_NNAPI_INSTANCES_TO_CACHE) &&
           VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
           verifier.VerifyTable(fallback_settings()) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS) &&
           VerifyField<int32_t>(verifier, VT_EXECUTION_PRIORITY) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_DYNAMIC_DIMENSIONS) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_FP16_PRECISION_FOR_FP32) &&
           VerifyField<uint8_t>(verifier, VT_USE_BURST_COMPUTATION) &&
           verifier.EndTable();
  }
  // Conversions between this flat accessor and the native NNAPISettingsT.
  NNAPISettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<NNAPISettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
854 
// Incremental builder for an NNAPISettings table. Call add_* for each field
// to set, then Finish() to close the table and obtain its offset.
struct NNAPISettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // Table start position recorded by the ctor.
  void add_accelerator_name(flatbuffers::Offset<flatbuffers::String> accelerator_name) {
    fbb_.AddOffset(NNAPISettings::VT_ACCELERATOR_NAME, accelerator_name);
  }
  void add_cache_directory(flatbuffers::Offset<flatbuffers::String> cache_directory) {
    fbb_.AddOffset(NNAPISettings::VT_CACHE_DIRECTORY, cache_directory);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(NNAPISettings::VT_MODEL_TOKEN, model_token);
  }
  void add_execution_preference(tflite::NNAPIExecutionPreference execution_preference) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PREFERENCE, static_cast<int32_t>(execution_preference), 0);
  }
  void add_no_of_nnapi_instances_to_cache(int32_t no_of_nnapi_instances_to_cache) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_NO_OF_NNAPI_INSTANCES_TO_CACHE, no_of_nnapi_instances_to_cache, 0);
  }
  void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
    fbb_.AddOffset(NNAPISettings::VT_FALLBACK_SETTINGS, fallback_settings);
  }
  void add_allow_nnapi_cpu_on_android_10_plus(bool allow_nnapi_cpu_on_android_10_plus) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_NNAPI_CPU_ON_ANDROID_10_PLUS, static_cast<uint8_t>(allow_nnapi_cpu_on_android_10_plus), 0);
  }
  void add_execution_priority(tflite::NNAPIExecutionPriority execution_priority) {
    fbb_.AddElement<int32_t>(NNAPISettings::VT_EXECUTION_PRIORITY, static_cast<int32_t>(execution_priority), 0);
  }
  void add_allow_dynamic_dimensions(bool allow_dynamic_dimensions) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_DYNAMIC_DIMENSIONS, static_cast<uint8_t>(allow_dynamic_dimensions), 0);
  }
  void add_allow_fp16_precision_for_fp32(bool allow_fp16_precision_for_fp32) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_ALLOW_FP16_PRECISION_FOR_FP32, static_cast<uint8_t>(allow_fp16_precision_for_fp32), 0);
  }
  void add_use_burst_computation(bool use_burst_computation) {
    fbb_.AddElement<uint8_t>(NNAPISettings::VT_USE_BURST_COMPUTATION, static_cast<uint8_t>(use_burst_computation), 0);
  }
  explicit NNAPISettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Copy-assignment deliberately declared but not defined (non-assignable).
  NNAPISettingsBuilder &operator=(const NNAPISettingsBuilder &);
  flatbuffers::Offset<NNAPISettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<NNAPISettings>(end);
    return o;
  }
};
902 
// One-shot factory for an NNAPISettings table from pre-built offsets and
// scalar values. Fields are added in the compiler-chosen order (wider scalars
// and offsets before the byte-sized bools) to keep the table tightly packed.
inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> accelerator_name = 0,
    flatbuffers::Offset<flatbuffers::String> cache_directory = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0,
    tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
    int32_t no_of_nnapi_instances_to_cache = 0,
    flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
    bool allow_nnapi_cpu_on_android_10_plus = false,
    tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
    bool allow_dynamic_dimensions = false,
    bool allow_fp16_precision_for_fp32 = false,
    bool use_burst_computation = false) {
  NNAPISettingsBuilder builder_(_fbb);
  builder_.add_execution_priority(execution_priority);
  builder_.add_fallback_settings(fallback_settings);
  builder_.add_no_of_nnapi_instances_to_cache(no_of_nnapi_instances_to_cache);
  builder_.add_execution_preference(execution_preference);
  builder_.add_model_token(model_token);
  builder_.add_cache_directory(cache_directory);
  builder_.add_accelerator_name(accelerator_name);
  builder_.add_use_burst_computation(use_burst_computation);
  builder_.add_allow_fp16_precision_for_fp32(allow_fp16_precision_for_fp32);
  builder_.add_allow_dynamic_dimensions(allow_dynamic_dimensions);
  builder_.add_allow_nnapi_cpu_on_android_10_plus(allow_nnapi_cpu_on_android_10_plus);
  return builder_.Finish();
}
930 
// Convenience overload of CreateNNAPISettings() taking C strings; non-null
// strings are copied into the builder, null leaves the field unset.
inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *accelerator_name = nullptr,
    const char *cache_directory = nullptr,
    const char *model_token = nullptr,
    tflite::NNAPIExecutionPreference execution_preference = tflite::NNAPIExecutionPreference_UNDEFINED,
    int32_t no_of_nnapi_instances_to_cache = 0,
    flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0,
    bool allow_nnapi_cpu_on_android_10_plus = false,
    tflite::NNAPIExecutionPriority execution_priority = tflite::NNAPIExecutionPriority_NNAPI_PRIORITY_UNDEFINED,
    bool allow_dynamic_dimensions = false,
    bool allow_fp16_precision_for_fp32 = false,
    bool use_burst_computation = false) {
  auto accelerator_name__ = accelerator_name ? _fbb.CreateString(accelerator_name) : 0;
  auto cache_directory__ = cache_directory ? _fbb.CreateString(cache_directory) : 0;
  auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
  return tflite::CreateNNAPISettings(
      _fbb,
      accelerator_name__,
      cache_directory__,
      model_token__,
      execution_preference,
      no_of_nnapi_instances_to_cache,
      fallback_settings,
      allow_nnapi_cpu_on_android_10_plus,
      execution_priority,
      allow_dynamic_dimensions,
      allow_fp16_precision_for_fp32,
      use_burst_computation);
}
961 
962 flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
963 
// Native (object API) mirror of the GPUSettings table; scalars are
// initialized to their schema defaults (note enable_quantized_inference
// defaults to true, unlike most flags).
struct GPUSettingsT : public flatbuffers::NativeTable {
  typedef GPUSettings TableType;
  bool is_precision_loss_allowed;
  bool enable_quantized_inference;
  tflite::GPUBackend force_backend;
  tflite::GPUInferencePriority inference_priority1;
  tflite::GPUInferencePriority inference_priority2;
  tflite::GPUInferencePriority inference_priority3;
  tflite::GPUInferenceUsage inference_preference;
  std::string cache_directory;
  std::string model_token;
  GPUSettingsT()
      : is_precision_loss_allowed(false),
        enable_quantized_inference(true),
        force_backend(tflite::GPUBackend_UNSET),
        inference_priority1(tflite::GPUInferencePriority_GPU_PRIORITY_AUTO),
        inference_priority2(tflite::GPUInferencePriority_GPU_PRIORITY_AUTO),
        inference_priority3(tflite::GPUInferencePriority_GPU_PRIORITY_AUTO),
        inference_preference(tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER) {
  }
};
985 
// Flat (in-buffer) accessor for the GPUSettings table; scalar getters return
// the schema default when the field is absent from the buffer.
struct GPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef GPUSettingsT NativeTableType;
  // Byte offsets of each field's slot in the table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_IS_PRECISION_LOSS_ALLOWED = 4,
    VT_ENABLE_QUANTIZED_INFERENCE = 6,
    VT_FORCE_BACKEND = 8,
    VT_INFERENCE_PRIORITY1 = 10,
    VT_INFERENCE_PRIORITY2 = 12,
    VT_INFERENCE_PRIORITY3 = 14,
    VT_INFERENCE_PREFERENCE = 16,
    VT_CACHE_DIRECTORY = 18,
    VT_MODEL_TOKEN = 20
  };
  bool is_precision_loss_allowed() const {
    return GetField<uint8_t>(VT_IS_PRECISION_LOSS_ALLOWED, 0) != 0;
  }
  // Default is 1 (true) when the field is not present in the buffer.
  bool enable_quantized_inference() const {
    return GetField<uint8_t>(VT_ENABLE_QUANTIZED_INFERENCE, 1) != 0;
  }
  tflite::GPUBackend force_backend() const {
    return static_cast<tflite::GPUBackend>(GetField<int32_t>(VT_FORCE_BACKEND, 0));
  }
  tflite::GPUInferencePriority inference_priority1() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY1, 0));
  }
  tflite::GPUInferencePriority inference_priority2() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY2, 0));
  }
  tflite::GPUInferencePriority inference_priority3() const {
    return static_cast<tflite::GPUInferencePriority>(GetField<int32_t>(VT_INFERENCE_PRIORITY3, 0));
  }
  tflite::GPUInferenceUsage inference_preference() const {
    return static_cast<tflite::GPUInferenceUsage>(GetField<int32_t>(VT_INFERENCE_PREFERENCE, 0));
  }
  const flatbuffers::String *cache_directory() const {
    return GetPointer<const flatbuffers::String *>(VT_CACHE_DIRECTORY);
  }
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_IS_PRECISION_LOSS_ALLOWED) &&
           VerifyField<uint8_t>(verifier, VT_ENABLE_QUANTIZED_INFERENCE) &&
           VerifyField<int32_t>(verifier, VT_FORCE_BACKEND) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY1) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY2) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY3) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PREFERENCE) &&
           VerifyOffset(verifier, VT_CACHE_DIRECTORY) &&
           verifier.VerifyString(cache_directory()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           verifier.EndTable();
  }
  // Conversions between this flat accessor and the native GPUSettingsT.
  GPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<GPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1045 
// Incremental builder for a GPUSettings table. Call add_* for each field to
// set, then Finish() to close the table and obtain its offset.
struct GPUSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // Table start position recorded by the ctor.
  void add_is_precision_loss_allowed(bool is_precision_loss_allowed) {
    fbb_.AddElement<uint8_t>(GPUSettings::VT_IS_PRECISION_LOSS_ALLOWED, static_cast<uint8_t>(is_precision_loss_allowed), 0);
  }
  // Schema default for this flag is 1 (true).
  void add_enable_quantized_inference(bool enable_quantized_inference) {
    fbb_.AddElement<uint8_t>(GPUSettings::VT_ENABLE_QUANTIZED_INFERENCE, static_cast<uint8_t>(enable_quantized_inference), 1);
  }
  void add_force_backend(tflite::GPUBackend force_backend) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_FORCE_BACKEND, static_cast<int32_t>(force_backend), 0);
  }
  void add_inference_priority1(tflite::GPUInferencePriority inference_priority1) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY1, static_cast<int32_t>(inference_priority1), 0);
  }
  void add_inference_priority2(tflite::GPUInferencePriority inference_priority2) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY2, static_cast<int32_t>(inference_priority2), 0);
  }
  void add_inference_priority3(tflite::GPUInferencePriority inference_priority3) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PRIORITY3, static_cast<int32_t>(inference_priority3), 0);
  }
  void add_inference_preference(tflite::GPUInferenceUsage inference_preference) {
    fbb_.AddElement<int32_t>(GPUSettings::VT_INFERENCE_PREFERENCE, static_cast<int32_t>(inference_preference), 0);
  }
  void add_cache_directory(flatbuffers::Offset<flatbuffers::String> cache_directory) {
    fbb_.AddOffset(GPUSettings::VT_CACHE_DIRECTORY, cache_directory);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(GPUSettings::VT_MODEL_TOKEN, model_token);
  }
  explicit GPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Copy-assignment deliberately declared but not defined (non-assignable).
  GPUSettingsBuilder &operator=(const GPUSettingsBuilder &);
  flatbuffers::Offset<GPUSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<GPUSettings>(end);
    return o;
  }
};
1087 
// One-shot factory for a GPUSettings table; defaults mirror the schema
// (quantized inference on, priorities AUTO, fast-single-answer preference).
inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    bool is_precision_loss_allowed = false,
    bool enable_quantized_inference = true,
    tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET,
    tflite::GPUInferencePriority inference_priority1 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority2 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority3 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferenceUsage inference_preference = tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
    flatbuffers::Offset<flatbuffers::String> cache_directory = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0) {
  GPUSettingsBuilder builder_(_fbb);
  // Fields are added in the compiler-chosen order (4-byte values before the
  // byte-sized bools) to keep the table tightly packed.
  builder_.add_model_token(model_token);
  builder_.add_cache_directory(cache_directory);
  builder_.add_inference_preference(inference_preference);
  builder_.add_inference_priority3(inference_priority3);
  builder_.add_inference_priority2(inference_priority2);
  builder_.add_inference_priority1(inference_priority1);
  builder_.add_force_backend(force_backend);
  builder_.add_enable_quantized_inference(enable_quantized_inference);
  builder_.add_is_precision_loss_allowed(is_precision_loss_allowed);
  return builder_.Finish();
}
1111 
// Convenience overload of CreateGPUSettings() taking C strings; non-null
// strings are copied into the builder, null leaves the field unset.
inline flatbuffers::Offset<GPUSettings> CreateGPUSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    bool is_precision_loss_allowed = false,
    bool enable_quantized_inference = true,
    tflite::GPUBackend force_backend = tflite::GPUBackend_UNSET,
    tflite::GPUInferencePriority inference_priority1 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority2 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferencePriority inference_priority3 = tflite::GPUInferencePriority_GPU_PRIORITY_AUTO,
    tflite::GPUInferenceUsage inference_preference = tflite::GPUInferenceUsage_GPU_INFERENCE_PREFERENCE_FAST_SINGLE_ANSWER,
    const char *cache_directory = nullptr,
    const char *model_token = nullptr) {
  auto cache_directory__ = cache_directory ? _fbb.CreateString(cache_directory) : 0;
  auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
  return tflite::CreateGPUSettings(
      _fbb,
      is_precision_loss_allowed,
      enable_quantized_inference,
      force_backend,
      inference_priority1,
      inference_priority2,
      inference_priority3,
      inference_preference,
      cache_directory__,
      model_token__);
}
1137 
1138 flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1139 
// Native (object API) mirror of the HexagonSettings table; all fields
// default to zero/false per the schema.
struct HexagonSettingsT : public flatbuffers::NativeTable {
  typedef HexagonSettings TableType;
  int32_t debug_level;
  int32_t powersave_level;
  bool print_graph_profile;
  bool print_graph_debug;
  HexagonSettingsT()
      : debug_level(0),
        powersave_level(0),
        print_graph_profile(false),
        print_graph_debug(false) {
  }
};
1153 
// Flat (in-buffer) accessor for the HexagonSettings table; getters return
// the schema default (0/false) when a field is absent.
struct HexagonSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef HexagonSettingsT NativeTableType;
  // Byte offsets of each field's slot in the table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DEBUG_LEVEL = 4,
    VT_POWERSAVE_LEVEL = 6,
    VT_PRINT_GRAPH_PROFILE = 8,
    VT_PRINT_GRAPH_DEBUG = 10
  };
  int32_t debug_level() const {
    return GetField<int32_t>(VT_DEBUG_LEVEL, 0);
  }
  int32_t powersave_level() const {
    return GetField<int32_t>(VT_POWERSAVE_LEVEL, 0);
  }
  bool print_graph_profile() const {
    return GetField<uint8_t>(VT_PRINT_GRAPH_PROFILE, 0) != 0;
  }
  bool print_graph_debug() const {
    return GetField<uint8_t>(VT_PRINT_GRAPH_DEBUG, 0) != 0;
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_DEBUG_LEVEL) &&
           VerifyField<int32_t>(verifier, VT_POWERSAVE_LEVEL) &&
           VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_PROFILE) &&
           VerifyField<uint8_t>(verifier, VT_PRINT_GRAPH_DEBUG) &&
           verifier.EndTable();
  }
  // Conversions between this flat accessor and the native HexagonSettingsT.
  HexagonSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<HexagonSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1186 
// Incremental builder for a HexagonSettings table. Call add_* for each field
// to set, then Finish() to close the table and obtain its offset.
struct HexagonSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // Table start position recorded by the ctor.
  void add_debug_level(int32_t debug_level) {
    fbb_.AddElement<int32_t>(HexagonSettings::VT_DEBUG_LEVEL, debug_level, 0);
  }
  void add_powersave_level(int32_t powersave_level) {
    fbb_.AddElement<int32_t>(HexagonSettings::VT_POWERSAVE_LEVEL, powersave_level, 0);
  }
  void add_print_graph_profile(bool print_graph_profile) {
    fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_PROFILE, static_cast<uint8_t>(print_graph_profile), 0);
  }
  void add_print_graph_debug(bool print_graph_debug) {
    fbb_.AddElement<uint8_t>(HexagonSettings::VT_PRINT_GRAPH_DEBUG, static_cast<uint8_t>(print_graph_debug), 0);
  }
  explicit HexagonSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Copy-assignment deliberately declared but not defined (non-assignable).
  HexagonSettingsBuilder &operator=(const HexagonSettingsBuilder &);
  flatbuffers::Offset<HexagonSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<HexagonSettings>(end);
    return o;
  }
};
1213 
// One-shot factory for a HexagonSettings table; int32 fields are added before
// the byte-sized bools to keep the table tightly packed.
inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t debug_level = 0,
    int32_t powersave_level = 0,
    bool print_graph_profile = false,
    bool print_graph_debug = false) {
  HexagonSettingsBuilder builder_(_fbb);
  builder_.add_powersave_level(powersave_level);
  builder_.add_debug_level(debug_level);
  builder_.add_print_graph_debug(print_graph_debug);
  builder_.add_print_graph_profile(print_graph_profile);
  return builder_.Finish();
}
1227 
1228 flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1229 
// Native (object API) mirror of the XNNPackSettings table; num_threads
// defaults to 0 per the schema.
struct XNNPackSettingsT : public flatbuffers::NativeTable {
  typedef XNNPackSettings TableType;
  int32_t num_threads;
  XNNPackSettingsT()
      : num_threads(0) {
  }
};
1237 
// Flat (in-buffer) accessor for the XNNPackSettings table (single
// num_threads field, default 0 when absent).
struct XNNPackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef XNNPackSettingsT NativeTableType;
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUM_THREADS = 4
  };
  int32_t num_threads() const {
    return GetField<int32_t>(VT_NUM_THREADS, 0);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUM_THREADS) &&
           verifier.EndTable();
  }
  // Conversions between this flat accessor and the native XNNPackSettingsT.
  XNNPackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<XNNPackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1255 
// Incremental builder for an XNNPackSettings table. Call add_num_threads()
// as needed, then Finish() to close the table and obtain its offset.
struct XNNPackSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // Table start position recorded by the ctor.
  void add_num_threads(int32_t num_threads) {
    fbb_.AddElement<int32_t>(XNNPackSettings::VT_NUM_THREADS, num_threads, 0);
  }
  explicit XNNPackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Copy-assignment deliberately declared but not defined (non-assignable).
  XNNPackSettingsBuilder &operator=(const XNNPackSettingsBuilder &);
  flatbuffers::Offset<XNNPackSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<XNNPackSettings>(end);
    return o;
  }
};
1273 
// One-shot factory for an XNNPackSettings table.
inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t num_threads = 0) {
  XNNPackSettingsBuilder builder_(_fbb);
  builder_.add_num_threads(num_threads);
  return builder_.Finish();
}
1281 
1282 flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1283 
// Native (object API) mirror of the EdgeTpuDeviceSpec table; owns the
// device_paths vector of strings, with scalars defaulted per the schema.
struct EdgeTpuDeviceSpecT : public flatbuffers::NativeTable {
  typedef EdgeTpuDeviceSpec TableType;
  tflite::EdgeTpuDeviceSpec_::PlatformType platform_type;
  int32_t num_chips;
  std::vector<std::string> device_paths;
  int32_t chip_family;
  EdgeTpuDeviceSpecT()
      : platform_type(tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO),
        num_chips(0),
        chip_family(0) {
  }
};
1296 
// Flat (in-buffer) accessor for the EdgeTpuDeviceSpec table; device_paths()
// returns nullptr when the vector field is absent.
struct EdgeTpuDeviceSpec FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuDeviceSpecT NativeTableType;
  // Byte offsets of each field's slot in the table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_PLATFORM_TYPE = 4,
    VT_NUM_CHIPS = 6,
    VT_DEVICE_PATHS = 8,
    VT_CHIP_FAMILY = 10
  };
  tflite::EdgeTpuDeviceSpec_::PlatformType platform_type() const {
    return static_cast<tflite::EdgeTpuDeviceSpec_::PlatformType>(GetField<int32_t>(VT_PLATFORM_TYPE, 0));
  }
  int32_t num_chips() const {
    return GetField<int32_t>(VT_NUM_CHIPS, 0);
  }
  const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *device_paths() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(VT_DEVICE_PATHS);
  }
  int32_t chip_family() const {
    return GetField<int32_t>(VT_CHIP_FAMILY, 0);
  }
  // Structural validation; the device_paths vector and each contained string
  // are verified recursively.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_PLATFORM_TYPE) &&
           VerifyField<int32_t>(verifier, VT_NUM_CHIPS) &&
           VerifyOffset(verifier, VT_DEVICE_PATHS) &&
           verifier.VerifyVector(device_paths()) &&
           verifier.VerifyVectorOfStrings(device_paths()) &&
           VerifyField<int32_t>(verifier, VT_CHIP_FAMILY) &&
           verifier.EndTable();
  }
  // Conversions between this flat accessor and the native EdgeTpuDeviceSpecT.
  EdgeTpuDeviceSpecT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuDeviceSpec> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1331 
// Incremental builder for an EdgeTpuDeviceSpec table. Call add_* for each
// field to set, then Finish() to close the table and obtain its offset.
struct EdgeTpuDeviceSpecBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // Table start position recorded by the ctor.
  void add_platform_type(tflite::EdgeTpuDeviceSpec_::PlatformType platform_type) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_PLATFORM_TYPE, static_cast<int32_t>(platform_type), 0);
  }
  void add_num_chips(int32_t num_chips) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_NUM_CHIPS, num_chips, 0);
  }
  void add_device_paths(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths) {
    fbb_.AddOffset(EdgeTpuDeviceSpec::VT_DEVICE_PATHS, device_paths);
  }
  void add_chip_family(int32_t chip_family) {
    fbb_.AddElement<int32_t>(EdgeTpuDeviceSpec::VT_CHIP_FAMILY, chip_family, 0);
  }
  explicit EdgeTpuDeviceSpecBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Copy-assignment deliberately declared but not defined (non-assignable).
  EdgeTpuDeviceSpecBuilder &operator=(const EdgeTpuDeviceSpecBuilder &);
  flatbuffers::Offset<EdgeTpuDeviceSpec> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuDeviceSpec>(end);
    return o;
  }
};
1358 
// Convenience helper: builds a complete EdgeTpuDeviceSpec table in one call.
// Fields are added in the size-sorted order emitted by the FlatBuffers code
// generator; do not reorder.
inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
    int32_t num_chips = 0,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> device_paths = 0,
    int32_t chip_family = 0) {
  EdgeTpuDeviceSpecBuilder builder_(_fbb);
  builder_.add_chip_family(chip_family);
  builder_.add_device_paths(device_paths);
  builder_.add_num_chips(num_chips);
  builder_.add_platform_type(platform_type);
  return builder_.Finish();
}
1372 
// "Direct" variant: accepts a std::vector of string offsets, serializes it
// into the buffer (nullptr → field left unset), then delegates to
// CreateEdgeTpuDeviceSpec.
inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpecDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuDeviceSpec_::PlatformType platform_type = tflite::EdgeTpuDeviceSpec_::PlatformType_MMIO,
    int32_t num_chips = 0,
    const std::vector<flatbuffers::Offset<flatbuffers::String>> *device_paths = nullptr,
    int32_t chip_family = 0) {
  auto device_paths__ = device_paths ? _fbb.CreateVector<flatbuffers::Offset<flatbuffers::String>>(*device_paths) : 0;
  return tflite::CreateEdgeTpuDeviceSpec(
      _fbb,
      platform_type,
      num_chips,
      device_paths__,
      chip_family);
}
1387 
1388 flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1389 
// Native (object-API) mirror of the EdgeTpuInactivePowerConfig table, used
// by the generated Pack/UnPack round-trip functions.
struct EdgeTpuInactivePowerConfigT : public flatbuffers::NativeTable {
  typedef EdgeTpuInactivePowerConfig TableType;
  tflite::EdgeTpuPowerState inactive_power_state;  // power state while inactive
  int64_t inactive_timeout_us;                     // `_us` suffix suggests microseconds — confirm against schema
  EdgeTpuInactivePowerConfigT()
      : inactive_power_state(tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE),
        inactive_timeout_us(0) {
  }
};
1399 
// Flat (in-buffer) EdgeTpuInactivePowerConfig table: zero-copy accessors over
// serialized data, plus verification and object-API conversion entry points.
struct EdgeTpuInactivePowerConfig FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuInactivePowerConfigT NativeTableType;
  // Vtable slot offsets for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INACTIVE_POWER_STATE = 4,
    VT_INACTIVE_TIMEOUT_US = 6
  };
  // Power state to use while inactive; stored as int32, defaults to 0.
  tflite::EdgeTpuPowerState inactive_power_state() const {
    return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INACTIVE_POWER_STATE, 0));
  }
  // Inactivity timeout (`_us` suffix suggests microseconds); defaults to 0.
  int64_t inactive_timeout_us() const {
    return GetField<int64_t>(VT_INACTIVE_TIMEOUT_US, 0);
  }
  // Verifies table bounds and both scalar field slots.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INACTIVE_POWER_STATE) &&
           VerifyField<int64_t>(verifier, VT_INACTIVE_TIMEOUT_US) &&
           verifier.EndTable();
  }
  // Object-API conversions (definitions live in the generated .cc).
  EdgeTpuInactivePowerConfigT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuInactivePowerConfig> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1422 
// Incremental builder for EdgeTpuInactivePowerConfig tables; see the
// EdgeTpuDeviceSpecBuilder pattern above.
struct EdgeTpuInactivePowerConfigBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;   // destination buffer builder
  flatbuffers::uoffset_t start_;          // table start offset
  void add_inactive_power_state(tflite::EdgeTpuPowerState inactive_power_state) {
    fbb_.AddElement<int32_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_POWER_STATE, static_cast<int32_t>(inactive_power_state), 0);
  }
  void add_inactive_timeout_us(int64_t inactive_timeout_us) {
    fbb_.AddElement<int64_t>(EdgeTpuInactivePowerConfig::VT_INACTIVE_TIMEOUT_US, inactive_timeout_us, 0);
  }
  explicit EdgeTpuInactivePowerConfigBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Non-assignable: declared but not defined.
  EdgeTpuInactivePowerConfigBuilder &operator=(const EdgeTpuInactivePowerConfigBuilder &);
  flatbuffers::Offset<EdgeTpuInactivePowerConfig> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuInactivePowerConfig>(end);
    return o;
  }
};
1443 
// One-call constructor for an EdgeTpuInactivePowerConfig table. The int64
// field is added before the int32 one (generator's size-sorted order).
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inactive_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    int64_t inactive_timeout_us = 0) {
  EdgeTpuInactivePowerConfigBuilder builder_(_fbb);
  builder_.add_inactive_timeout_us(inactive_timeout_us);
  builder_.add_inactive_power_state(inactive_power_state);
  return builder_.Finish();
}
1453 
1454 flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1455 
// Native (object-API) mirror of the EdgeTpuSettings table. Owns its nested
// tables via unique_ptr; vectors/strings default-construct empty.
struct EdgeTpuSettingsT : public flatbuffers::NativeTable {
  typedef EdgeTpuSettings TableType;
  tflite::EdgeTpuPowerState inference_power_state;  // power state during inference
  std::vector<std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>> inactive_power_configs;
  int32_t inference_priority;                       // -1 = unset (schema default)
  std::unique_ptr<tflite::EdgeTpuDeviceSpecT> edgetpu_device_spec;
  std::string model_token;
  tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type;
  EdgeTpuSettingsT()
      : inference_power_state(tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE),
        inference_priority(-1),
        float_truncation_type(tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED) {
  }
};
1470 
// Flat (in-buffer) EdgeTpuSettings table: zero-copy accessors over serialized
// EdgeTPU delegate configuration, plus verification and object-API hooks.
struct EdgeTpuSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef EdgeTpuSettingsT NativeTableType;
  // Vtable slot offsets for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INFERENCE_POWER_STATE = 4,
    VT_INACTIVE_POWER_CONFIGS = 6,
    VT_INFERENCE_PRIORITY = 8,
    VT_EDGETPU_DEVICE_SPEC = 10,
    VT_MODEL_TOKEN = 12,
    VT_FLOAT_TRUNCATION_TYPE = 14
  };
  // Power state during inference; stored as int32, defaults to 0.
  tflite::EdgeTpuPowerState inference_power_state() const {
    return static_cast<tflite::EdgeTpuPowerState>(GetField<int32_t>(VT_INFERENCE_POWER_STATE, 0));
  }
  // Vector of inactive-power configs, or nullptr if unset.
  const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *>(VT_INACTIVE_POWER_CONFIGS);
  }
  // Inference priority; note the non-zero default of -1 (unset).
  int32_t inference_priority() const {
    return GetField<int32_t>(VT_INFERENCE_PRIORITY, -1);
  }
  // Nested device-spec table, or nullptr if unset.
  const tflite::EdgeTpuDeviceSpec *edgetpu_device_spec() const {
    return GetPointer<const tflite::EdgeTpuDeviceSpec *>(VT_EDGETPU_DEVICE_SPEC);
  }
  // Model token string, or nullptr if unset.
  const flatbuffers::String *model_token() const {
    return GetPointer<const flatbuffers::String *>(VT_MODEL_TOKEN);
  }
  // Float truncation mode; stored as int32, defaults to 0 (UNSPECIFIED).
  tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type() const {
    return static_cast<tflite::EdgeTpuSettings_::FloatTruncationType>(GetField<int32_t>(VT_FLOAT_TRUNCATION_TYPE, 0));
  }
  // Verifies table bounds, scalar slots, the nested table, the string, and
  // every table inside `inactive_power_configs`.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_POWER_STATE) &&
           VerifyOffset(verifier, VT_INACTIVE_POWER_CONFIGS) &&
           verifier.VerifyVector(inactive_power_configs()) &&
           verifier.VerifyVectorOfTables(inactive_power_configs()) &&
           VerifyField<int32_t>(verifier, VT_INFERENCE_PRIORITY) &&
           VerifyOffset(verifier, VT_EDGETPU_DEVICE_SPEC) &&
           verifier.VerifyTable(edgetpu_device_spec()) &&
           VerifyOffset(verifier, VT_MODEL_TOKEN) &&
           verifier.VerifyString(model_token()) &&
           VerifyField<int32_t>(verifier, VT_FLOAT_TRUNCATION_TYPE) &&
           verifier.EndTable();
  }
  // Object-API conversions (definitions live in the generated .cc).
  EdgeTpuSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<EdgeTpuSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1517 
// Incremental builder for EdgeTpuSettings tables.
struct EdgeTpuSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;   // destination buffer builder
  flatbuffers::uoffset_t start_;          // table start offset
  void add_inference_power_state(tflite::EdgeTpuPowerState inference_power_state) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_POWER_STATE, static_cast<int32_t>(inference_power_state), 0);
  }
  void add_inactive_power_configs(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs) {
    fbb_.AddOffset(EdgeTpuSettings::VT_INACTIVE_POWER_CONFIGS, inactive_power_configs);
  }
  // Note the -1 default: a value of -1 is treated as "unset" and not stored.
  void add_inference_priority(int32_t inference_priority) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_INFERENCE_PRIORITY, inference_priority, -1);
  }
  void add_edgetpu_device_spec(flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec) {
    fbb_.AddOffset(EdgeTpuSettings::VT_EDGETPU_DEVICE_SPEC, edgetpu_device_spec);
  }
  void add_model_token(flatbuffers::Offset<flatbuffers::String> model_token) {
    fbb_.AddOffset(EdgeTpuSettings::VT_MODEL_TOKEN, model_token);
  }
  void add_float_truncation_type(tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type) {
    fbb_.AddElement<int32_t>(EdgeTpuSettings::VT_FLOAT_TRUNCATION_TYPE, static_cast<int32_t>(float_truncation_type), 0);
  }
  explicit EdgeTpuSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Non-assignable: declared but not defined.
  EdgeTpuSettingsBuilder &operator=(const EdgeTpuSettingsBuilder &);
  flatbuffers::Offset<EdgeTpuSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<EdgeTpuSettings>(end);
    return o;
  }
};
1550 
// One-call constructor for an EdgeTpuSettings table; fields added in the
// generator's size-sorted order — do not reorder.
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>> inactive_power_configs = 0,
    int32_t inference_priority = -1,
    flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0,
    flatbuffers::Offset<flatbuffers::String> model_token = 0,
    tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type = tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED) {
  EdgeTpuSettingsBuilder builder_(_fbb);
  builder_.add_float_truncation_type(float_truncation_type);
  builder_.add_model_token(model_token);
  builder_.add_edgetpu_device_spec(edgetpu_device_spec);
  builder_.add_inference_priority(inference_priority);
  builder_.add_inactive_power_configs(inactive_power_configs);
  builder_.add_inference_power_state(inference_power_state);
  return builder_.Finish();
}
1568 
// "Direct" variant: serializes the std::vector of nested-table offsets and
// the C-string token into the buffer first (nullptr → field left unset),
// then delegates to CreateEdgeTpuSettings.
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::EdgeTpuPowerState inference_power_state = tflite::EdgeTpuPowerState_UNDEFINED_POWERSTATE,
    const std::vector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> *inactive_power_configs = nullptr,
    int32_t inference_priority = -1,
    flatbuffers::Offset<tflite::EdgeTpuDeviceSpec> edgetpu_device_spec = 0,
    const char *model_token = nullptr,
    tflite::EdgeTpuSettings_::FloatTruncationType float_truncation_type = tflite::EdgeTpuSettings_::FloatTruncationType_UNSPECIFIED) {
  auto inactive_power_configs__ = inactive_power_configs ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>>(*inactive_power_configs) : 0;
  auto model_token__ = model_token ? _fbb.CreateString(model_token) : 0;
  return tflite::CreateEdgeTpuSettings(
      _fbb,
      inference_power_state,
      inactive_power_configs__,
      inference_priority,
      edgetpu_device_spec,
      model_token__,
      float_truncation_type);
}
1588 
1589 flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1590 
// Native (object-API) mirror of the CoralSettings table.
struct CoralSettingsT : public flatbuffers::NativeTable {
  typedef CoralSettings TableType;
  std::string device;                          // target device identifier — semantics defined by the schema
  tflite::CoralSettings_::Performance performance;
  bool usb_always_dfu;
  int32_t usb_max_bulk_in_queue_length;
  CoralSettingsT()
      : performance(tflite::CoralSettings_::Performance_UNDEFINED),
        usb_always_dfu(false),
        usb_max_bulk_in_queue_length(0) {
  }
};
1603 
// Flat (in-buffer) CoralSettings table: zero-copy accessors over serialized
// Coral accelerator configuration.
struct CoralSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef CoralSettingsT NativeTableType;
  // Vtable slot offsets for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DEVICE = 4,
    VT_PERFORMANCE = 6,
    VT_USB_ALWAYS_DFU = 8,
    VT_USB_MAX_BULK_IN_QUEUE_LENGTH = 10
  };
  // Device identifier string, or nullptr if unset.
  const flatbuffers::String *device() const {
    return GetPointer<const flatbuffers::String *>(VT_DEVICE);
  }
  // Performance setting; stored as int32, defaults to 0 (UNDEFINED).
  tflite::CoralSettings_::Performance performance() const {
    return static_cast<tflite::CoralSettings_::Performance>(GetField<int32_t>(VT_PERFORMANCE, 0));
  }
  // Booleans are stored as uint8 in FlatBuffers; defaults to false.
  bool usb_always_dfu() const {
    return GetField<uint8_t>(VT_USB_ALWAYS_DFU, 0) != 0;
  }
  int32_t usb_max_bulk_in_queue_length() const {
    return GetField<int32_t>(VT_USB_MAX_BULK_IN_QUEUE_LENGTH, 0);
  }
  // Verifies table bounds, the string payload, and each scalar slot.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_DEVICE) &&
           verifier.VerifyString(device()) &&
           VerifyField<int32_t>(verifier, VT_PERFORMANCE) &&
           VerifyField<uint8_t>(verifier, VT_USB_ALWAYS_DFU) &&
           VerifyField<int32_t>(verifier, VT_USB_MAX_BULK_IN_QUEUE_LENGTH) &&
           verifier.EndTable();
  }
  // Object-API conversions (definitions live in the generated .cc).
  CoralSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<CoralSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1637 
// Incremental builder for CoralSettings tables.
struct CoralSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;   // destination buffer builder
  flatbuffers::uoffset_t start_;          // table start offset
  void add_device(flatbuffers::Offset<flatbuffers::String> device) {
    fbb_.AddOffset(CoralSettings::VT_DEVICE, device);
  }
  void add_performance(tflite::CoralSettings_::Performance performance) {
    fbb_.AddElement<int32_t>(CoralSettings::VT_PERFORMANCE, static_cast<int32_t>(performance), 0);
  }
  // Bool serialized as uint8.
  void add_usb_always_dfu(bool usb_always_dfu) {
    fbb_.AddElement<uint8_t>(CoralSettings::VT_USB_ALWAYS_DFU, static_cast<uint8_t>(usb_always_dfu), 0);
  }
  void add_usb_max_bulk_in_queue_length(int32_t usb_max_bulk_in_queue_length) {
    fbb_.AddElement<int32_t>(CoralSettings::VT_USB_MAX_BULK_IN_QUEUE_LENGTH, usb_max_bulk_in_queue_length, 0);
  }
  explicit CoralSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Non-assignable: declared but not defined.
  CoralSettingsBuilder &operator=(const CoralSettingsBuilder &);
  flatbuffers::Offset<CoralSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<CoralSettings>(end);
    return o;
  }
};
1664 
// One-call constructor for a CoralSettings table; 4-byte fields added first,
// then the 1-byte bool (generator's size-sorted order).
inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> device = 0,
    tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
    bool usb_always_dfu = false,
    int32_t usb_max_bulk_in_queue_length = 0) {
  CoralSettingsBuilder builder_(_fbb);
  builder_.add_usb_max_bulk_in_queue_length(usb_max_bulk_in_queue_length);
  builder_.add_performance(performance);
  builder_.add_device(device);
  builder_.add_usb_always_dfu(usb_always_dfu);
  return builder_.Finish();
}
1678 
// "Direct" variant: serializes the C-string `device` into the buffer
// (nullptr → field left unset), then delegates to CreateCoralSettings.
inline flatbuffers::Offset<CoralSettings> CreateCoralSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *device = nullptr,
    tflite::CoralSettings_::Performance performance = tflite::CoralSettings_::Performance_UNDEFINED,
    bool usb_always_dfu = false,
    int32_t usb_max_bulk_in_queue_length = 0) {
  auto device__ = device ? _fbb.CreateString(device) : 0;
  return tflite::CreateCoralSettings(
      _fbb,
      device__,
      performance,
      usb_always_dfu,
      usb_max_bulk_in_queue_length);
}
1693 
1694 flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1695 
// Native (object-API) mirror of the CPUSettings table.
struct CPUSettingsT : public flatbuffers::NativeTable {
  typedef CPUSettings TableType;
  int32_t num_threads;  // -1 = unset (schema default)
  CPUSettingsT()
      : num_threads(-1) {
  }
};
1703 
// Flat (in-buffer) CPUSettings table.
struct CPUSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef CPUSettingsT NativeTableType;
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUM_THREADS = 4
  };
  // Thread count; note the non-zero default of -1 (unset).
  int32_t num_threads() const {
    return GetField<int32_t>(VT_NUM_THREADS, -1);
  }
  // Verifies table bounds and the single scalar slot.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUM_THREADS) &&
           verifier.EndTable();
  }
  // Object-API conversions (definitions live in the generated .cc).
  CPUSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<CPUSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1721 
// Incremental builder for CPUSettings tables.
struct CPUSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;   // destination buffer builder
  flatbuffers::uoffset_t start_;          // table start offset
  // Note the -1 default: passing -1 leaves the field unset in the buffer.
  void add_num_threads(int32_t num_threads) {
    fbb_.AddElement<int32_t>(CPUSettings::VT_NUM_THREADS, num_threads, -1);
  }
  explicit CPUSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Non-assignable: declared but not defined.
  CPUSettingsBuilder &operator=(const CPUSettingsBuilder &);
  flatbuffers::Offset<CPUSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<CPUSettings>(end);
    return o;
  }
};
1739 
// One-call constructor for a CPUSettings table.
inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t num_threads = -1) {
  CPUSettingsBuilder builder_(_fbb);
  builder_.add_num_threads(num_threads);
  return builder_.Finish();
}
1747 
1748 flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1749 
// Native (object-API) mirror of the top-level TFLiteSettings table: one
// delegate selector plus per-delegate settings sub-tables (owned).
struct TFLiteSettingsT : public flatbuffers::NativeTable {
  typedef TFLiteSettings TableType;
  tflite::Delegate delegate;  // which delegate the rest of the settings apply to
  std::unique_ptr<tflite::NNAPISettingsT> nnapi_settings;
  std::unique_ptr<tflite::GPUSettingsT> gpu_settings;
  std::unique_ptr<tflite::HexagonSettingsT> hexagon_settings;
  std::unique_ptr<tflite::XNNPackSettingsT> xnnpack_settings;
  std::unique_ptr<tflite::CPUSettingsT> cpu_settings;
  int32_t max_delegated_partitions;
  std::unique_ptr<tflite::EdgeTpuSettingsT> edgetpu_settings;
  std::unique_ptr<tflite::CoralSettingsT> coral_settings;
  std::unique_ptr<tflite::FallbackSettingsT> fallback_settings;
  TFLiteSettingsT()
      : delegate(tflite::Delegate_NONE),
        max_delegated_partitions(0) {
  }
};
1767 
// Flat (in-buffer) TFLiteSettings table: the top-level acceleration
// configuration. Each accessor returns nullptr when its sub-table is unset.
struct TFLiteSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef TFLiteSettingsT NativeTableType;
  // Vtable slot offsets for each field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_DELEGATE = 4,
    VT_NNAPI_SETTINGS = 6,
    VT_GPU_SETTINGS = 8,
    VT_HEXAGON_SETTINGS = 10,
    VT_XNNPACK_SETTINGS = 12,
    VT_CPU_SETTINGS = 14,
    VT_MAX_DELEGATED_PARTITIONS = 16,
    VT_EDGETPU_SETTINGS = 18,
    VT_CORAL_SETTINGS = 20,
    VT_FALLBACK_SETTINGS = 22
  };
  // Selected delegate; stored as int32, defaults to 0 (Delegate_NONE).
  tflite::Delegate delegate() const {
    return static_cast<tflite::Delegate>(GetField<int32_t>(VT_DELEGATE, 0));
  }
  const tflite::NNAPISettings *nnapi_settings() const {
    return GetPointer<const tflite::NNAPISettings *>(VT_NNAPI_SETTINGS);
  }
  const tflite::GPUSettings *gpu_settings() const {
    return GetPointer<const tflite::GPUSettings *>(VT_GPU_SETTINGS);
  }
  const tflite::HexagonSettings *hexagon_settings() const {
    return GetPointer<const tflite::HexagonSettings *>(VT_HEXAGON_SETTINGS);
  }
  const tflite::XNNPackSettings *xnnpack_settings() const {
    return GetPointer<const tflite::XNNPackSettings *>(VT_XNNPACK_SETTINGS);
  }
  const tflite::CPUSettings *cpu_settings() const {
    return GetPointer<const tflite::CPUSettings *>(VT_CPU_SETTINGS);
  }
  int32_t max_delegated_partitions() const {
    return GetField<int32_t>(VT_MAX_DELEGATED_PARTITIONS, 0);
  }
  const tflite::EdgeTpuSettings *edgetpu_settings() const {
    return GetPointer<const tflite::EdgeTpuSettings *>(VT_EDGETPU_SETTINGS);
  }
  const tflite::CoralSettings *coral_settings() const {
    return GetPointer<const tflite::CoralSettings *>(VT_CORAL_SETTINGS);
  }
  const tflite::FallbackSettings *fallback_settings() const {
    return GetPointer<const tflite::FallbackSettings *>(VT_FALLBACK_SETTINGS);
  }
  // Verifies table bounds, each scalar slot, and recursively verifies each
  // nested settings table that is present.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_DELEGATE) &&
           VerifyOffset(verifier, VT_NNAPI_SETTINGS) &&
           verifier.VerifyTable(nnapi_settings()) &&
           VerifyOffset(verifier, VT_GPU_SETTINGS) &&
           verifier.VerifyTable(gpu_settings()) &&
           VerifyOffset(verifier, VT_HEXAGON_SETTINGS) &&
           verifier.VerifyTable(hexagon_settings()) &&
           VerifyOffset(verifier, VT_XNNPACK_SETTINGS) &&
           verifier.VerifyTable(xnnpack_settings()) &&
           VerifyOffset(verifier, VT_CPU_SETTINGS) &&
           verifier.VerifyTable(cpu_settings()) &&
           VerifyField<int32_t>(verifier, VT_MAX_DELEGATED_PARTITIONS) &&
           VerifyOffset(verifier, VT_EDGETPU_SETTINGS) &&
           verifier.VerifyTable(edgetpu_settings()) &&
           VerifyOffset(verifier, VT_CORAL_SETTINGS) &&
           verifier.VerifyTable(coral_settings()) &&
           VerifyOffset(verifier, VT_FALLBACK_SETTINGS) &&
           verifier.VerifyTable(fallback_settings()) &&
           verifier.EndTable();
  }
  // Object-API conversions (definitions live in the generated .cc).
  TFLiteSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<TFLiteSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1838 
// Incremental builder for TFLiteSettings tables.
struct TFLiteSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;   // destination buffer builder
  flatbuffers::uoffset_t start_;          // table start offset
  void add_delegate(tflite::Delegate delegate) {
    fbb_.AddElement<int32_t>(TFLiteSettings::VT_DELEGATE, static_cast<int32_t>(delegate), 0);
  }
  void add_nnapi_settings(flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_NNAPI_SETTINGS, nnapi_settings);
  }
  void add_gpu_settings(flatbuffers::Offset<tflite::GPUSettings> gpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_GPU_SETTINGS, gpu_settings);
  }
  void add_hexagon_settings(flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_HEXAGON_SETTINGS, hexagon_settings);
  }
  void add_xnnpack_settings(flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_XNNPACK_SETTINGS, xnnpack_settings);
  }
  void add_cpu_settings(flatbuffers::Offset<tflite::CPUSettings> cpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_CPU_SETTINGS, cpu_settings);
  }
  void add_max_delegated_partitions(int32_t max_delegated_partitions) {
    fbb_.AddElement<int32_t>(TFLiteSettings::VT_MAX_DELEGATED_PARTITIONS, max_delegated_partitions, 0);
  }
  void add_edgetpu_settings(flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_EDGETPU_SETTINGS, edgetpu_settings);
  }
  void add_coral_settings(flatbuffers::Offset<tflite::CoralSettings> coral_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_CORAL_SETTINGS, coral_settings);
  }
  void add_fallback_settings(flatbuffers::Offset<tflite::FallbackSettings> fallback_settings) {
    fbb_.AddOffset(TFLiteSettings::VT_FALLBACK_SETTINGS, fallback_settings);
  }
  explicit TFLiteSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Non-assignable: declared but not defined.
  TFLiteSettingsBuilder &operator=(const TFLiteSettingsBuilder &);
  flatbuffers::Offset<TFLiteSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<TFLiteSettings>(end);
    return o;
  }
};
1883 
// One-call constructor for a TFLiteSettings table; fields added in the
// generator's order (all 4-byte, reverse declaration order) — do not reorder.
inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    tflite::Delegate delegate = tflite::Delegate_NONE,
    flatbuffers::Offset<tflite::NNAPISettings> nnapi_settings = 0,
    flatbuffers::Offset<tflite::GPUSettings> gpu_settings = 0,
    flatbuffers::Offset<tflite::HexagonSettings> hexagon_settings = 0,
    flatbuffers::Offset<tflite::XNNPackSettings> xnnpack_settings = 0,
    flatbuffers::Offset<tflite::CPUSettings> cpu_settings = 0,
    int32_t max_delegated_partitions = 0,
    flatbuffers::Offset<tflite::EdgeTpuSettings> edgetpu_settings = 0,
    flatbuffers::Offset<tflite::CoralSettings> coral_settings = 0,
    flatbuffers::Offset<tflite::FallbackSettings> fallback_settings = 0) {
  TFLiteSettingsBuilder builder_(_fbb);
  builder_.add_fallback_settings(fallback_settings);
  builder_.add_coral_settings(coral_settings);
  builder_.add_edgetpu_settings(edgetpu_settings);
  builder_.add_max_delegated_partitions(max_delegated_partitions);
  builder_.add_cpu_settings(cpu_settings);
  builder_.add_xnnpack_settings(xnnpack_settings);
  builder_.add_hexagon_settings(hexagon_settings);
  builder_.add_gpu_settings(gpu_settings);
  builder_.add_nnapi_settings(nnapi_settings);
  builder_.add_delegate(delegate);
  return builder_.Finish();
}
1909 
1910 flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1911 
1912 struct FallbackSettingsT : public flatbuffers::NativeTable {
1913   typedef FallbackSettings TableType;
1914   bool allow_automatic_fallback_on_compilation_error;
1915   bool allow_automatic_fallback_on_execution_error;
FallbackSettingsTFallbackSettingsT1916   FallbackSettingsT()
1917       : allow_automatic_fallback_on_compilation_error(false),
1918         allow_automatic_fallback_on_execution_error(false) {
1919   }
1920 };
1921 
// Zero-copy accessor for a serialized FallbackSettings table.
// Getters read straight out of the buffer and return the schema default
// (false) when the field is absent.
struct FallbackSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef FallbackSettingsT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR = 4,
    VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR = 6
  };
  // Bools are stored as uint8_t on the wire; 0 is the default.
  bool allow_automatic_fallback_on_compilation_error() const {
    return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, 0) != 0;
  }
  bool allow_automatic_fallback_on_execution_error() const {
    return GetField<uint8_t>(VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, 0) != 0;
  }
  // Structural validation of this table against `verifier`'s buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR) &&
           VerifyField<uint8_t>(verifier, VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  FallbackSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<FallbackSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
1944 
// Incremental writer for a FallbackSettings table. Each field may be added
// at most once; AddElement skips the field entirely when the value equals
// the default (last argument), keeping the buffer compact.
struct FallbackSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_allow_automatic_fallback_on_compilation_error(bool allow_automatic_fallback_on_compilation_error) {
    fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_COMPILATION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_compilation_error), 0);
  }
  void add_allow_automatic_fallback_on_execution_error(bool allow_automatic_fallback_on_execution_error) {
    fbb_.AddElement<uint8_t>(FallbackSettings::VT_ALLOW_AUTOMATIC_FALLBACK_ON_EXECUTION_ERROR, static_cast<uint8_t>(allow_automatic_fallback_on_execution_error), 0);
  }
  explicit FallbackSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  FallbackSettingsBuilder &operator=(const FallbackSettingsBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<FallbackSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<FallbackSettings>(end);
    return o;
  }
};
1965 
1966 inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(
1967     flatbuffers::FlatBufferBuilder &_fbb,
1968     bool allow_automatic_fallback_on_compilation_error = false,
1969     bool allow_automatic_fallback_on_execution_error = false) {
1970   FallbackSettingsBuilder builder_(_fbb);
1971   builder_.add_allow_automatic_fallback_on_execution_error(allow_automatic_fallback_on_execution_error);
1972   builder_.add_allow_automatic_fallback_on_compilation_error(allow_automatic_fallback_on_compilation_error);
1973   return builder_.Finish();
1974 }
1975 
1976 flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
1977 
1978 struct BenchmarkMetricT : public flatbuffers::NativeTable {
1979   typedef BenchmarkMetric TableType;
1980   std::string name;
1981   std::vector<float> values;
BenchmarkMetricTBenchmarkMetricT1982   BenchmarkMetricT() {
1983   }
1984 };
1985 
// Zero-copy accessor for a serialized BenchmarkMetric table.
// Pointer-typed getters return nullptr when the field is absent.
struct BenchmarkMetric FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkMetricT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NAME = 4,
    VT_VALUES = 6
  };
  const flatbuffers::String *name() const {
    return GetPointer<const flatbuffers::String *>(VT_NAME);
  }
  const flatbuffers::Vector<float> *values() const {
    return GetPointer<const flatbuffers::Vector<float> *>(VT_VALUES);
  }
  // Structural validation. The && chain short-circuits: each offset is
  // verified before the string/vector it points at is dereferenced.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_NAME) &&
           verifier.VerifyString(name()) &&
           VerifyOffset(verifier, VT_VALUES) &&
           verifier.VerifyVector(values()) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  BenchmarkMetricT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkMetric> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2010 
// Incremental writer for a BenchmarkMetric table. The string/vector passed
// to add_* must already be serialized into the same builder.
struct BenchmarkMetricBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_name(flatbuffers::Offset<flatbuffers::String> name) {
    fbb_.AddOffset(BenchmarkMetric::VT_NAME, name);
  }
  void add_values(flatbuffers::Offset<flatbuffers::Vector<float>> values) {
    fbb_.AddOffset(BenchmarkMetric::VT_VALUES, values);
  }
  explicit BenchmarkMetricBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  BenchmarkMetricBuilder &operator=(const BenchmarkMetricBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkMetric> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkMetric>(end);
    return o;
  }
};
2031 
2032 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(
2033     flatbuffers::FlatBufferBuilder &_fbb,
2034     flatbuffers::Offset<flatbuffers::String> name = 0,
2035     flatbuffers::Offset<flatbuffers::Vector<float>> values = 0) {
2036   BenchmarkMetricBuilder builder_(_fbb);
2037   builder_.add_values(values);
2038   builder_.add_name(name);
2039   return builder_.Finish();
2040 }
2041 
2042 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetricDirect(
2043     flatbuffers::FlatBufferBuilder &_fbb,
2044     const char *name = nullptr,
2045     const std::vector<float> *values = nullptr) {
2046   auto name__ = name ? _fbb.CreateString(name) : 0;
2047   auto values__ = values ? _fbb.CreateVector<float>(*values) : 0;
2048   return tflite::CreateBenchmarkMetric(
2049       _fbb,
2050       name__,
2051       values__);
2052 }
2053 
2054 flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2055 
2056 struct BenchmarkResultT : public flatbuffers::NativeTable {
2057   typedef BenchmarkResult TableType;
2058   std::vector<int64_t> initialization_time_us;
2059   std::vector<int64_t> inference_time_us;
2060   int32_t max_memory_kb;
2061   bool ok;
2062   std::vector<std::unique_ptr<tflite::BenchmarkMetricT>> metrics;
BenchmarkResultTBenchmarkResultT2063   BenchmarkResultT()
2064       : max_memory_kb(0),
2065         ok(false) {
2066   }
2067 };
2068 
// Zero-copy accessor for a serialized BenchmarkResult table.
// Pointer-typed getters return nullptr and scalar getters return the
// schema default when the field is absent from the buffer.
struct BenchmarkResult FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkResultT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INITIALIZATION_TIME_US = 4,
    VT_INFERENCE_TIME_US = 6,
    VT_MAX_MEMORY_KB = 8,
    VT_OK = 10,
    VT_METRICS = 12
  };
  const flatbuffers::Vector<int64_t> *initialization_time_us() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INITIALIZATION_TIME_US);
  }
  const flatbuffers::Vector<int64_t> *inference_time_us() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INFERENCE_TIME_US);
  }
  int32_t max_memory_kb() const {
    return GetField<int32_t>(VT_MAX_MEMORY_KB, 0);
  }
  // Bool stored as uint8_t on the wire; 0 is the default.
  bool ok() const {
    return GetField<uint8_t>(VT_OK, 0) != 0;
  }
  const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *>(VT_METRICS);
  }
  // Structural validation. The && chain short-circuits: each offset is
  // verified before the vector it points at is walked, and metrics' element
  // tables are verified individually via VerifyVectorOfTables.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_INITIALIZATION_TIME_US) &&
           verifier.VerifyVector(initialization_time_us()) &&
           VerifyOffset(verifier, VT_INFERENCE_TIME_US) &&
           verifier.VerifyVector(inference_time_us()) &&
           VerifyField<int32_t>(verifier, VT_MAX_MEMORY_KB) &&
           VerifyField<uint8_t>(verifier, VT_OK) &&
           VerifyOffset(verifier, VT_METRICS) &&
           verifier.VerifyVector(metrics()) &&
           verifier.VerifyVectorOfTables(metrics()) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  BenchmarkResultT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkResult> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2110 
// Incremental writer for a BenchmarkResult table. Vectors passed to add_*
// must already be serialized into the same builder; AddElement skips scalar
// fields whose value equals the default (last argument).
struct BenchmarkResultBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_initialization_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us) {
    fbb_.AddOffset(BenchmarkResult::VT_INITIALIZATION_TIME_US, initialization_time_us);
  }
  void add_inference_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us) {
    fbb_.AddOffset(BenchmarkResult::VT_INFERENCE_TIME_US, inference_time_us);
  }
  void add_max_memory_kb(int32_t max_memory_kb) {
    fbb_.AddElement<int32_t>(BenchmarkResult::VT_MAX_MEMORY_KB, max_memory_kb, 0);
  }
  void add_ok(bool ok) {
    fbb_.AddElement<uint8_t>(BenchmarkResult::VT_OK, static_cast<uint8_t>(ok), 0);
  }
  void add_metrics(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics) {
    fbb_.AddOffset(BenchmarkResult::VT_METRICS, metrics);
  }
  explicit BenchmarkResultBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  BenchmarkResultBuilder &operator=(const BenchmarkResultBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkResult> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkResult>(end);
    return o;
  }
};
2140 
2141 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(
2142     flatbuffers::FlatBufferBuilder &_fbb,
2143     flatbuffers::Offset<flatbuffers::Vector<int64_t>> initialization_time_us = 0,
2144     flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us = 0,
2145     int32_t max_memory_kb = 0,
2146     bool ok = false,
2147     flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::BenchmarkMetric>>> metrics = 0) {
2148   BenchmarkResultBuilder builder_(_fbb);
2149   builder_.add_metrics(metrics);
2150   builder_.add_max_memory_kb(max_memory_kb);
2151   builder_.add_inference_time_us(inference_time_us);
2152   builder_.add_initialization_time_us(initialization_time_us);
2153   builder_.add_ok(ok);
2154   return builder_.Finish();
2155 }
2156 
2157 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResultDirect(
2158     flatbuffers::FlatBufferBuilder &_fbb,
2159     const std::vector<int64_t> *initialization_time_us = nullptr,
2160     const std::vector<int64_t> *inference_time_us = nullptr,
2161     int32_t max_memory_kb = 0,
2162     bool ok = false,
2163     const std::vector<flatbuffers::Offset<tflite::BenchmarkMetric>> *metrics = nullptr) {
2164   auto initialization_time_us__ = initialization_time_us ? _fbb.CreateVector<int64_t>(*initialization_time_us) : 0;
2165   auto inference_time_us__ = inference_time_us ? _fbb.CreateVector<int64_t>(*inference_time_us) : 0;
2166   auto metrics__ = metrics ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>>(*metrics) : 0;
2167   return tflite::CreateBenchmarkResult(
2168       _fbb,
2169       initialization_time_us__,
2170       inference_time_us__,
2171       max_memory_kb,
2172       ok,
2173       metrics__);
2174 }
2175 
2176 flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2177 
2178 struct ErrorCodeT : public flatbuffers::NativeTable {
2179   typedef ErrorCode TableType;
2180   tflite::Delegate source;
2181   int32_t tflite_error;
2182   int64_t underlying_api_error;
ErrorCodeTErrorCodeT2183   ErrorCodeT()
2184       : source(tflite::Delegate_NONE),
2185         tflite_error(0),
2186         underlying_api_error(0) {
2187   }
2188 };
2189 
// Zero-copy accessor for a serialized ErrorCode table. Scalar getters
// return the schema default when the field is absent.
struct ErrorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef ErrorCodeT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_SOURCE = 4,
    VT_TFLITE_ERROR = 6,
    VT_UNDERLYING_API_ERROR = 8
  };
  // Enum is stored as its int32_t underlying value on the wire.
  tflite::Delegate source() const {
    return static_cast<tflite::Delegate>(GetField<int32_t>(VT_SOURCE, 0));
  }
  int32_t tflite_error() const {
    return GetField<int32_t>(VT_TFLITE_ERROR, 0);
  }
  int64_t underlying_api_error() const {
    return GetField<int64_t>(VT_UNDERLYING_API_ERROR, 0);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_SOURCE) &&
           VerifyField<int32_t>(verifier, VT_TFLITE_ERROR) &&
           VerifyField<int64_t>(verifier, VT_UNDERLYING_API_ERROR) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  ErrorCodeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<ErrorCode> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2217 
// Incremental writer for an ErrorCode table. AddElement skips a field when
// its value equals the default (last argument), keeping the buffer compact.
struct ErrorCodeBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_source(tflite::Delegate source) {
    fbb_.AddElement<int32_t>(ErrorCode::VT_SOURCE, static_cast<int32_t>(source), 0);
  }
  void add_tflite_error(int32_t tflite_error) {
    fbb_.AddElement<int32_t>(ErrorCode::VT_TFLITE_ERROR, tflite_error, 0);
  }
  void add_underlying_api_error(int64_t underlying_api_error) {
    fbb_.AddElement<int64_t>(ErrorCode::VT_UNDERLYING_API_ERROR, underlying_api_error, 0);
  }
  explicit ErrorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  ErrorCodeBuilder &operator=(const ErrorCodeBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<ErrorCode> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<ErrorCode>(end);
    return o;
  }
};
2241 
2242 inline flatbuffers::Offset<ErrorCode> CreateErrorCode(
2243     flatbuffers::FlatBufferBuilder &_fbb,
2244     tflite::Delegate source = tflite::Delegate_NONE,
2245     int32_t tflite_error = 0,
2246     int64_t underlying_api_error = 0) {
2247   ErrorCodeBuilder builder_(_fbb);
2248   builder_.add_underlying_api_error(underlying_api_error);
2249   builder_.add_tflite_error(tflite_error);
2250   builder_.add_source(source);
2251   return builder_.Finish();
2252 }
2253 
2254 flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2255 
2256 struct BenchmarkErrorT : public flatbuffers::NativeTable {
2257   typedef BenchmarkError TableType;
2258   tflite::BenchmarkStage stage;
2259   int32_t exit_code;
2260   int32_t signal;
2261   std::vector<std::unique_ptr<tflite::ErrorCodeT>> error_code;
2262   int32_t mini_benchmark_error_code;
BenchmarkErrorTBenchmarkErrorT2263   BenchmarkErrorT()
2264       : stage(tflite::BenchmarkStage_UNKNOWN),
2265         exit_code(0),
2266         signal(0),
2267         mini_benchmark_error_code(0) {
2268   }
2269 };
2270 
// Zero-copy accessor for a serialized BenchmarkError table.
// Pointer-typed getters return nullptr and scalar getters return the
// schema default when the field is absent.
struct BenchmarkError FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkErrorT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_STAGE = 4,
    VT_EXIT_CODE = 6,
    VT_SIGNAL = 8,
    VT_ERROR_CODE = 10,
    VT_MINI_BENCHMARK_ERROR_CODE = 12
  };
  // Enum is stored as its int32_t underlying value on the wire.
  tflite::BenchmarkStage stage() const {
    return static_cast<tflite::BenchmarkStage>(GetField<int32_t>(VT_STAGE, 0));
  }
  int32_t exit_code() const {
    return GetField<int32_t>(VT_EXIT_CODE, 0);
  }
  int32_t signal() const {
    return GetField<int32_t>(VT_SIGNAL, 0);
  }
  const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>> *>(VT_ERROR_CODE);
  }
  int32_t mini_benchmark_error_code() const {
    return GetField<int32_t>(VT_MINI_BENCHMARK_ERROR_CODE, 0);
  }
  // Structural validation. The && chain short-circuits: the error_code offset
  // is verified before the vector is walked and its element tables checked.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_STAGE) &&
           VerifyField<int32_t>(verifier, VT_EXIT_CODE) &&
           VerifyField<int32_t>(verifier, VT_SIGNAL) &&
           VerifyOffset(verifier, VT_ERROR_CODE) &&
           verifier.VerifyVector(error_code()) &&
           verifier.VerifyVectorOfTables(error_code()) &&
           VerifyField<int32_t>(verifier, VT_MINI_BENCHMARK_ERROR_CODE) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  BenchmarkErrorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkError> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2310 
// Incremental writer for a BenchmarkError table. The error_code vector must
// already be serialized into the same builder; AddElement skips scalar
// fields whose value equals the default (last argument).
struct BenchmarkErrorBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_stage(tflite::BenchmarkStage stage) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_STAGE, static_cast<int32_t>(stage), 0);
  }
  void add_exit_code(int32_t exit_code) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_EXIT_CODE, exit_code, 0);
  }
  void add_signal(int32_t signal) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_SIGNAL, signal, 0);
  }
  void add_error_code(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code) {
    fbb_.AddOffset(BenchmarkError::VT_ERROR_CODE, error_code);
  }
  void add_mini_benchmark_error_code(int32_t mini_benchmark_error_code) {
    fbb_.AddElement<int32_t>(BenchmarkError::VT_MINI_BENCHMARK_ERROR_CODE, mini_benchmark_error_code, 0);
  }
  explicit BenchmarkErrorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  BenchmarkErrorBuilder &operator=(const BenchmarkErrorBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkError> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkError>(end);
    return o;
  }
};
2340 
2341 inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(
2342     flatbuffers::FlatBufferBuilder &_fbb,
2343     tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
2344     int32_t exit_code = 0,
2345     int32_t signal = 0,
2346     flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::ErrorCode>>> error_code = 0,
2347     int32_t mini_benchmark_error_code = 0) {
2348   BenchmarkErrorBuilder builder_(_fbb);
2349   builder_.add_mini_benchmark_error_code(mini_benchmark_error_code);
2350   builder_.add_error_code(error_code);
2351   builder_.add_signal(signal);
2352   builder_.add_exit_code(exit_code);
2353   builder_.add_stage(stage);
2354   return builder_.Finish();
2355 }
2356 
2357 inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkErrorDirect(
2358     flatbuffers::FlatBufferBuilder &_fbb,
2359     tflite::BenchmarkStage stage = tflite::BenchmarkStage_UNKNOWN,
2360     int32_t exit_code = 0,
2361     int32_t signal = 0,
2362     const std::vector<flatbuffers::Offset<tflite::ErrorCode>> *error_code = nullptr,
2363     int32_t mini_benchmark_error_code = 0) {
2364   auto error_code__ = error_code ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>>(*error_code) : 0;
2365   return tflite::CreateBenchmarkError(
2366       _fbb,
2367       stage,
2368       exit_code,
2369       signal,
2370       error_code__,
2371       mini_benchmark_error_code);
2372 }
2373 
2374 flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2375 
2376 struct BenchmarkEventT : public flatbuffers::NativeTable {
2377   typedef BenchmarkEvent TableType;
2378   std::unique_ptr<tflite::TFLiteSettingsT> tflite_settings;
2379   tflite::BenchmarkEventType event_type;
2380   std::unique_ptr<tflite::BenchmarkResultT> result;
2381   std::unique_ptr<tflite::BenchmarkErrorT> error;
2382   int64_t boottime_us;
2383   int64_t wallclock_us;
BenchmarkEventTBenchmarkEventT2384   BenchmarkEventT()
2385       : event_type(tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE),
2386         boottime_us(0),
2387         wallclock_us(0) {
2388   }
2389 };
2390 
// Zero-copy accessor for a serialized BenchmarkEvent table.
// Pointer-typed getters return nullptr and scalar getters return the
// schema default when the field is absent.
struct BenchmarkEvent FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkEventT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_TFLITE_SETTINGS = 4,
    VT_EVENT_TYPE = 6,
    VT_RESULT = 8,
    VT_ERROR = 10,
    VT_BOOTTIME_US = 12,
    VT_WALLCLOCK_US = 14
  };
  const tflite::TFLiteSettings *tflite_settings() const {
    return GetPointer<const tflite::TFLiteSettings *>(VT_TFLITE_SETTINGS);
  }
  // Enum is stored as its int32_t underlying value on the wire.
  tflite::BenchmarkEventType event_type() const {
    return static_cast<tflite::BenchmarkEventType>(GetField<int32_t>(VT_EVENT_TYPE, 0));
  }
  const tflite::BenchmarkResult *result() const {
    return GetPointer<const tflite::BenchmarkResult *>(VT_RESULT);
  }
  const tflite::BenchmarkError *error() const {
    return GetPointer<const tflite::BenchmarkError *>(VT_ERROR);
  }
  int64_t boottime_us() const {
    return GetField<int64_t>(VT_BOOTTIME_US, 0);
  }
  int64_t wallclock_us() const {
    return GetField<int64_t>(VT_WALLCLOCK_US, 0);
  }
  // Structural validation. The && chain short-circuits: each sub-table offset
  // is verified before the sub-table itself is recursively verified.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_TFLITE_SETTINGS) &&
           verifier.VerifyTable(tflite_settings()) &&
           VerifyField<int32_t>(verifier, VT_EVENT_TYPE) &&
           VerifyOffset(verifier, VT_RESULT) &&
           verifier.VerifyTable(result()) &&
           VerifyOffset(verifier, VT_ERROR) &&
           verifier.VerifyTable(error()) &&
           VerifyField<int64_t>(verifier, VT_BOOTTIME_US) &&
           VerifyField<int64_t>(verifier, VT_WALLCLOCK_US) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  BenchmarkEventT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkEvent> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2436 
// Incremental writer for a BenchmarkEvent table. Sub-tables passed to add_*
// must already be serialized into the same builder; AddElement skips scalar
// fields whose value equals the default (last argument).
struct BenchmarkEventBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;  // destination buffer
  flatbuffers::uoffset_t start_;         // table start, set by StartTable()
  void add_tflite_settings(flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings) {
    fbb_.AddOffset(BenchmarkEvent::VT_TFLITE_SETTINGS, tflite_settings);
  }
  void add_event_type(tflite::BenchmarkEventType event_type) {
    fbb_.AddElement<int32_t>(BenchmarkEvent::VT_EVENT_TYPE, static_cast<int32_t>(event_type), 0);
  }
  void add_result(flatbuffers::Offset<tflite::BenchmarkResult> result) {
    fbb_.AddOffset(BenchmarkEvent::VT_RESULT, result);
  }
  void add_error(flatbuffers::Offset<tflite::BenchmarkError> error) {
    fbb_.AddOffset(BenchmarkEvent::VT_ERROR, error);
  }
  void add_boottime_us(int64_t boottime_us) {
    fbb_.AddElement<int64_t>(BenchmarkEvent::VT_BOOTTIME_US, boottime_us, 0);
  }
  void add_wallclock_us(int64_t wallclock_us) {
    fbb_.AddElement<int64_t>(BenchmarkEvent::VT_WALLCLOCK_US, wallclock_us, 0);
  }
  explicit BenchmarkEventBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: pre-C++11 idiom for deleting copy-assignment.
  BenchmarkEventBuilder &operator=(const BenchmarkEventBuilder &);
  // Closes the table and returns its offset within the buffer.
  flatbuffers::Offset<BenchmarkEvent> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkEvent>(end);
    return o;
  }
};
2469 
2470 inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(
2471     flatbuffers::FlatBufferBuilder &_fbb,
2472     flatbuffers::Offset<tflite::TFLiteSettings> tflite_settings = 0,
2473     tflite::BenchmarkEventType event_type = tflite::BenchmarkEventType_UNDEFINED_BENCHMARK_EVENT_TYPE,
2474     flatbuffers::Offset<tflite::BenchmarkResult> result = 0,
2475     flatbuffers::Offset<tflite::BenchmarkError> error = 0,
2476     int64_t boottime_us = 0,
2477     int64_t wallclock_us = 0) {
2478   BenchmarkEventBuilder builder_(_fbb);
2479   builder_.add_wallclock_us(wallclock_us);
2480   builder_.add_boottime_us(boottime_us);
2481   builder_.add_error(error);
2482   builder_.add_result(result);
2483   builder_.add_event_type(event_type);
2484   builder_.add_tflite_settings(tflite_settings);
2485   return builder_.Finish();
2486 }
2487 
2488 flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2489 
2490 struct BestAccelerationDecisionT : public flatbuffers::NativeTable {
2491   typedef BestAccelerationDecision TableType;
2492   int32_t number_of_source_events;
2493   std::unique_ptr<tflite::BenchmarkEventT> min_latency_event;
2494   int64_t min_inference_time_us;
BestAccelerationDecisionTBestAccelerationDecisionT2495   BestAccelerationDecisionT()
2496       : number_of_source_events(0),
2497         min_inference_time_us(0) {
2498   }
2499 };
2500 
// Zero-copy accessor for a serialized BestAccelerationDecision table.
// min_latency_event() returns nullptr and scalar getters return the schema
// default when the field is absent.
struct BestAccelerationDecision FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BestAccelerationDecisionT NativeTableType;
  // vtable slot offsets: first field at 4, then +2 per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_NUMBER_OF_SOURCE_EVENTS = 4,
    VT_MIN_LATENCY_EVENT = 6,
    VT_MIN_INFERENCE_TIME_US = 8
  };
  int32_t number_of_source_events() const {
    return GetField<int32_t>(VT_NUMBER_OF_SOURCE_EVENTS, 0);
  }
  const tflite::BenchmarkEvent *min_latency_event() const {
    return GetPointer<const tflite::BenchmarkEvent *>(VT_MIN_LATENCY_EVENT);
  }
  int64_t min_inference_time_us() const {
    return GetField<int64_t>(VT_MIN_INFERENCE_TIME_US, 0);
  }
  // Structural validation. The && chain short-circuits: the sub-table offset
  // is verified before the sub-table itself is recursively verified.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_NUMBER_OF_SOURCE_EVENTS) &&
           VerifyOffset(verifier, VT_MIN_LATENCY_EVENT) &&
           verifier.VerifyTable(min_latency_event()) &&
           VerifyField<int64_t>(verifier, VT_MIN_INFERENCE_TIME_US) &&
           verifier.EndTable();
  }
  // Object-API bridge; definitions are generated out of line.
  BestAccelerationDecisionT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BestAccelerationDecisionT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BestAccelerationDecision> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2529 
// Stateful builder: the constructor opens the table (StartTable), add_*
// methods write individual field slots, and Finish() closes the table and
// returns its offset.
struct BestAccelerationDecisionBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_number_of_source_events(int32_t number_of_source_events) {
    fbb_.AddElement<int32_t>(BestAccelerationDecision::VT_NUMBER_OF_SOURCE_EVENTS, number_of_source_events, 0);
  }
  void add_min_latency_event(flatbuffers::Offset<tflite::BenchmarkEvent> min_latency_event) {
    fbb_.AddOffset(BestAccelerationDecision::VT_MIN_LATENCY_EVENT, min_latency_event);
  }
  void add_min_inference_time_us(int64_t min_inference_time_us) {
    fbb_.AddElement<int64_t>(BestAccelerationDecision::VT_MIN_INFERENCE_TIME_US, min_inference_time_us, 0);
  }
  explicit BestAccelerationDecisionBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  BestAccelerationDecisionBuilder &operator=(const BestAccelerationDecisionBuilder &);
  flatbuffers::Offset<BestAccelerationDecision> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BestAccelerationDecision>(end);
    return o;
  }
};
2553 
// One-shot helper that builds a complete BestAccelerationDecision table.
// Fields are added largest-first (generator layout rule); this only affects
// byte packing, not the logical schema.
inline flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t number_of_source_events = 0,
    flatbuffers::Offset<tflite::BenchmarkEvent> min_latency_event = 0,
    int64_t min_inference_time_us = 0) {
  BestAccelerationDecisionBuilder builder_(_fbb);
  builder_.add_min_inference_time_us(min_inference_time_us);
  builder_.add_min_latency_event(min_latency_event);
  builder_.add_number_of_source_events(number_of_source_events);
  return builder_.Finish();
}
2565 
2566 flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2567 
// Object-API ("native") form of BenchmarkInitializationFailure.
struct BenchmarkInitializationFailureT : public flatbuffers::NativeTable {
  typedef BenchmarkInitializationFailure TableType;
  int32_t initialization_status;
  // Scalar starts at 0, matching the flatbuffer field default.
  BenchmarkInitializationFailureT()
      : initialization_status(0) {
  }
};
2575 
// Zero-copy accessor for a BenchmarkInitializationFailure table; the single
// scalar field is looked up through its vtable slot and defaults to 0.
struct BenchmarkInitializationFailure FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkInitializationFailureT NativeTableType;
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INITIALIZATION_STATUS = 4
  };
  int32_t initialization_status() const {
    return GetField<int32_t>(VT_INITIALIZATION_STATUS, 0);
  }
  // Structural validation of the table and its one field slot.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int32_t>(verifier, VT_INITIALIZATION_STATUS) &&
           verifier.EndTable();
  }
  // Object-API conversions; definitions are emitted later in this generated file.
  BenchmarkInitializationFailureT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkInitializationFailureT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkInitializationFailure> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2593 
// Stateful builder: constructor opens the table, add_* writes the field,
// Finish() closes the table and returns its offset.
struct BenchmarkInitializationFailureBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_initialization_status(int32_t initialization_status) {
    fbb_.AddElement<int32_t>(BenchmarkInitializationFailure::VT_INITIALIZATION_STATUS, initialization_status, 0);
  }
  explicit BenchmarkInitializationFailureBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  BenchmarkInitializationFailureBuilder &operator=(const BenchmarkInitializationFailureBuilder &);
  flatbuffers::Offset<BenchmarkInitializationFailure> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkInitializationFailure>(end);
    return o;
  }
};
2611 
// One-shot helper that builds a complete BenchmarkInitializationFailure table.
inline flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(
    flatbuffers::FlatBufferBuilder &_fbb,
    int32_t initialization_status = 0) {
  BenchmarkInitializationFailureBuilder builder_(_fbb);
  builder_.add_initialization_status(initialization_status);
  return builder_.Finish();
}
2619 
2620 flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2621 
// Object-API ("native") form of MiniBenchmarkEvent: owns its optional
// sub-objects on the heap; each is null when the field was absent.
struct MiniBenchmarkEventT : public flatbuffers::NativeTable {
  typedef MiniBenchmarkEvent TableType;
  bool is_log_flushing_event;
  std::unique_ptr<tflite::BestAccelerationDecisionT> best_acceleration_decision;
  std::unique_ptr<tflite::BenchmarkInitializationFailureT> initialization_failure;
  std::unique_ptr<tflite::BenchmarkEventT> benchmark_event;
  // The bool starts false, matching the flatbuffer field default.
  MiniBenchmarkEventT()
      : is_log_flushing_event(false) {
  }
};
2632 
// Zero-copy accessor for a MiniBenchmarkEvent table; accessors return the
// schema default (false / nullptr) when a slot is absent.
struct MiniBenchmarkEvent FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef MiniBenchmarkEventT NativeTableType;
  // Vtable slot offsets, one per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_IS_LOG_FLUSHING_EVENT = 4,
    VT_BEST_ACCELERATION_DECISION = 6,
    VT_INITIALIZATION_FAILURE = 8,
    VT_BENCHMARK_EVENT = 10
  };
  // Bools are stored as uint8_t in the buffer.
  bool is_log_flushing_event() const {
    return GetField<uint8_t>(VT_IS_LOG_FLUSHING_EVENT, 0) != 0;
  }
  const tflite::BestAccelerationDecision *best_acceleration_decision() const {
    return GetPointer<const tflite::BestAccelerationDecision *>(VT_BEST_ACCELERATION_DECISION);
  }
  const tflite::BenchmarkInitializationFailure *initialization_failure() const {
    return GetPointer<const tflite::BenchmarkInitializationFailure *>(VT_INITIALIZATION_FAILURE);
  }
  const tflite::BenchmarkEvent *benchmark_event() const {
    return GetPointer<const tflite::BenchmarkEvent *>(VT_BENCHMARK_EVENT);
  }
  // Structural validation of this table and its nested tables (recursively).
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_IS_LOG_FLUSHING_EVENT) &&
           VerifyOffset(verifier, VT_BEST_ACCELERATION_DECISION) &&
           verifier.VerifyTable(best_acceleration_decision()) &&
           VerifyOffset(verifier, VT_INITIALIZATION_FAILURE) &&
           verifier.VerifyTable(initialization_failure()) &&
           VerifyOffset(verifier, VT_BENCHMARK_EVENT) &&
           verifier.VerifyTable(benchmark_event()) &&
           verifier.EndTable();
  }
  // Object-API conversions; definitions are emitted later in this generated file.
  MiniBenchmarkEventT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(MiniBenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<MiniBenchmarkEvent> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2668 
// Stateful builder: constructor opens the table, add_* methods write field
// slots (the bool is widened to uint8_t for storage), Finish() closes the
// table and returns its offset.
struct MiniBenchmarkEventBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_is_log_flushing_event(bool is_log_flushing_event) {
    fbb_.AddElement<uint8_t>(MiniBenchmarkEvent::VT_IS_LOG_FLUSHING_EVENT, static_cast<uint8_t>(is_log_flushing_event), 0);
  }
  void add_best_acceleration_decision(flatbuffers::Offset<tflite::BestAccelerationDecision> best_acceleration_decision) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_BEST_ACCELERATION_DECISION, best_acceleration_decision);
  }
  void add_initialization_failure(flatbuffers::Offset<tflite::BenchmarkInitializationFailure> initialization_failure) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_INITIALIZATION_FAILURE, initialization_failure);
  }
  void add_benchmark_event(flatbuffers::Offset<tflite::BenchmarkEvent> benchmark_event) {
    fbb_.AddOffset(MiniBenchmarkEvent::VT_BENCHMARK_EVENT, benchmark_event);
  }
  explicit MiniBenchmarkEventBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  MiniBenchmarkEventBuilder &operator=(const MiniBenchmarkEventBuilder &);
  flatbuffers::Offset<MiniBenchmarkEvent> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<MiniBenchmarkEvent>(end);
    return o;
  }
};
2695 
// One-shot helper that builds a complete MiniBenchmarkEvent table.
// Fields are added largest-first (generator layout rule): offsets in reverse
// declaration order, then the one-byte bool. Packing only; schema unchanged.
inline flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(
    flatbuffers::FlatBufferBuilder &_fbb,
    bool is_log_flushing_event = false,
    flatbuffers::Offset<tflite::BestAccelerationDecision> best_acceleration_decision = 0,
    flatbuffers::Offset<tflite::BenchmarkInitializationFailure> initialization_failure = 0,
    flatbuffers::Offset<tflite::BenchmarkEvent> benchmark_event = 0) {
  MiniBenchmarkEventBuilder builder_(_fbb);
  builder_.add_benchmark_event(benchmark_event);
  builder_.add_initialization_failure(initialization_failure);
  builder_.add_best_acceleration_decision(best_acceleration_decision);
  builder_.add_is_log_flushing_event(is_log_flushing_event);
  return builder_.Finish();
}
2709 
2710 flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2711 
// Object-API ("native") form of ModelFile.
struct ModelFileT : public flatbuffers::NativeTable {
  typedef ModelFile TableType;
  std::string filename;  // empty when the field was absent in the buffer
  int64_t fd;
  int64_t offset;
  int64_t length;
  // Scalars start at 0, matching the flatbuffer field defaults.
  ModelFileT()
      : fd(0),
        offset(0),
        length(0) {
  }
};
2724 
// Zero-copy accessor for a ModelFile table; accessors return the schema
// default (0 / nullptr) when a slot is absent.
struct ModelFile FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef ModelFileT NativeTableType;
  // Vtable slot offsets, one per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_FILENAME = 4,
    VT_FD = 6,
    VT_OFFSET = 8,
    VT_LENGTH = 10
  };
  // In-buffer string; nullptr when not present.
  const flatbuffers::String *filename() const {
    return GetPointer<const flatbuffers::String *>(VT_FILENAME);
  }
  int64_t fd() const {
    return GetField<int64_t>(VT_FD, 0);
  }
  int64_t offset() const {
    return GetField<int64_t>(VT_OFFSET, 0);
  }
  int64_t length() const {
    return GetField<int64_t>(VT_LENGTH, 0);
  }
  // Structural validation of the table, its string, and scalar slots.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_FILENAME) &&
           verifier.VerifyString(filename()) &&
           VerifyField<int64_t>(verifier, VT_FD) &&
           VerifyField<int64_t>(verifier, VT_OFFSET) &&
           VerifyField<int64_t>(verifier, VT_LENGTH) &&
           verifier.EndTable();
  }
  // Object-API conversions; definitions are emitted later in this generated file.
  ModelFileT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(ModelFileT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<ModelFile> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2758 
// Stateful builder: constructor opens the table, add_* methods write field
// slots, Finish() closes the table and returns its offset.
struct ModelFileBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_filename(flatbuffers::Offset<flatbuffers::String> filename) {
    fbb_.AddOffset(ModelFile::VT_FILENAME, filename);
  }
  void add_fd(int64_t fd) {
    fbb_.AddElement<int64_t>(ModelFile::VT_FD, fd, 0);
  }
  void add_offset(int64_t offset) {
    fbb_.AddElement<int64_t>(ModelFile::VT_OFFSET, offset, 0);
  }
  void add_length(int64_t length) {
    fbb_.AddElement<int64_t>(ModelFile::VT_LENGTH, length, 0);
  }
  explicit ModelFileBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  ModelFileBuilder &operator=(const ModelFileBuilder &);
  flatbuffers::Offset<ModelFile> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<ModelFile>(end);
    return o;
  }
};
2785 
// One-shot helper that builds a complete ModelFile table.
// Fields are added largest-first (generator layout rule); packing only.
inline flatbuffers::Offset<ModelFile> CreateModelFile(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> filename = 0,
    int64_t fd = 0,
    int64_t offset = 0,
    int64_t length = 0) {
  ModelFileBuilder builder_(_fbb);
  builder_.add_length(length);
  builder_.add_offset(offset);
  builder_.add_fd(fd);
  builder_.add_filename(filename);
  return builder_.Finish();
}
2799 
// "Direct" variant: copies the C-string argument into the buffer first.
// A null filename is encoded as an absent field (offset 0).
inline flatbuffers::Offset<ModelFile> CreateModelFileDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *filename = nullptr,
    int64_t fd = 0,
    int64_t offset = 0,
    int64_t length = 0) {
  auto filename__ = filename ? _fbb.CreateString(filename) : 0;
  return tflite::CreateModelFile(
      _fbb,
      filename__,
      fd,
      offset,
      length);
}
2814 
2815 flatbuffers::Offset<ModelFile> CreateModelFile(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2816 
// Object-API ("native") form of BenchmarkStoragePaths; both strings
// default-construct empty (absent fields stay empty).
struct BenchmarkStoragePathsT : public flatbuffers::NativeTable {
  typedef BenchmarkStoragePaths TableType;
  std::string storage_file_path;
  std::string data_directory_path;
  BenchmarkStoragePathsT() {
  }
};
2824 
// Zero-copy accessor for a BenchmarkStoragePaths table; string accessors
// return nullptr when the slot is absent.
struct BenchmarkStoragePaths FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef BenchmarkStoragePathsT NativeTableType;
  // Vtable slot offsets, one per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_STORAGE_FILE_PATH = 4,
    VT_DATA_DIRECTORY_PATH = 6
  };
  const flatbuffers::String *storage_file_path() const {
    return GetPointer<const flatbuffers::String *>(VT_STORAGE_FILE_PATH);
  }
  const flatbuffers::String *data_directory_path() const {
    return GetPointer<const flatbuffers::String *>(VT_DATA_DIRECTORY_PATH);
  }
  // Structural validation of the table and both string slots.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_STORAGE_FILE_PATH) &&
           verifier.VerifyString(storage_file_path()) &&
           VerifyOffset(verifier, VT_DATA_DIRECTORY_PATH) &&
           verifier.VerifyString(data_directory_path()) &&
           verifier.EndTable();
  }
  // Object-API conversions; definitions are emitted later in this generated file.
  BenchmarkStoragePathsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(BenchmarkStoragePathsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<BenchmarkStoragePaths> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2849 
// Stateful builder: constructor opens the table, add_* methods write field
// slots, Finish() closes the table and returns its offset.
struct BenchmarkStoragePathsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_storage_file_path(flatbuffers::Offset<flatbuffers::String> storage_file_path) {
    fbb_.AddOffset(BenchmarkStoragePaths::VT_STORAGE_FILE_PATH, storage_file_path);
  }
  void add_data_directory_path(flatbuffers::Offset<flatbuffers::String> data_directory_path) {
    fbb_.AddOffset(BenchmarkStoragePaths::VT_DATA_DIRECTORY_PATH, data_directory_path);
  }
  explicit BenchmarkStoragePathsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  BenchmarkStoragePathsBuilder &operator=(const BenchmarkStoragePathsBuilder &);
  flatbuffers::Offset<BenchmarkStoragePaths> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<BenchmarkStoragePaths>(end);
    return o;
  }
};
2870 
// One-shot helper that builds a complete BenchmarkStoragePaths table
// (fields added in reverse declaration order, per the generator's layout).
inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::String> storage_file_path = 0,
    flatbuffers::Offset<flatbuffers::String> data_directory_path = 0) {
  BenchmarkStoragePathsBuilder builder_(_fbb);
  builder_.add_data_directory_path(data_directory_path);
  builder_.add_storage_file_path(storage_file_path);
  return builder_.Finish();
}
2880 
// "Direct" variant: copies the C-string arguments into the buffer first.
// Null pointers are encoded as absent fields (offset 0).
inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePathsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const char *storage_file_path = nullptr,
    const char *data_directory_path = nullptr) {
  auto storage_file_path__ = storage_file_path ? _fbb.CreateString(storage_file_path) : 0;
  auto data_directory_path__ = data_directory_path ? _fbb.CreateString(data_directory_path) : 0;
  return tflite::CreateBenchmarkStoragePaths(
      _fbb,
      storage_file_path__,
      data_directory_path__);
}
2892 
2893 flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2894 
// Object-API ("native") form of MinibenchmarkSettings: owns the vector of
// candidate settings and its optional sub-objects on the heap.
struct MinibenchmarkSettingsT : public flatbuffers::NativeTable {
  typedef MinibenchmarkSettings TableType;
  std::vector<std::unique_ptr<tflite::TFLiteSettingsT>> settings_to_test;
  std::unique_ptr<tflite::ModelFileT> model_file;
  std::unique_ptr<tflite::BenchmarkStoragePathsT> storage_paths;
  MinibenchmarkSettingsT() {
  }
};
2903 
// Zero-copy accessor for a MinibenchmarkSettings table; accessors return
// nullptr when a slot is absent.
struct MinibenchmarkSettings FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef MinibenchmarkSettingsT NativeTableType;
  // Vtable slot offsets, one per field.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_SETTINGS_TO_TEST = 4,
    VT_MODEL_FILE = 6,
    VT_STORAGE_PATHS = 8
  };
  // In-buffer vector of nested tables; nullptr when not present.
  const flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>> *settings_to_test() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>> *>(VT_SETTINGS_TO_TEST);
  }
  const tflite::ModelFile *model_file() const {
    return GetPointer<const tflite::ModelFile *>(VT_MODEL_FILE);
  }
  const tflite::BenchmarkStoragePaths *storage_paths() const {
    return GetPointer<const tflite::BenchmarkStoragePaths *>(VT_STORAGE_PATHS);
  }
  // Structural validation: the vector itself, each table it references, and
  // both nested tables (recursively).
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_SETTINGS_TO_TEST) &&
           verifier.VerifyVector(settings_to_test()) &&
           verifier.VerifyVectorOfTables(settings_to_test()) &&
           VerifyOffset(verifier, VT_MODEL_FILE) &&
           verifier.VerifyTable(model_file()) &&
           VerifyOffset(verifier, VT_STORAGE_PATHS) &&
           verifier.VerifyTable(storage_paths()) &&
           verifier.EndTable();
  }
  // Object-API conversions; definitions are emitted later in this generated file.
  MinibenchmarkSettingsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  void UnPackTo(MinibenchmarkSettingsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
  static flatbuffers::Offset<MinibenchmarkSettings> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
};
2935 
// Stateful builder: constructor opens the table, add_* methods write field
// slots, Finish() closes the table and returns its offset.
struct MinibenchmarkSettingsBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start returned by StartTable()
  void add_settings_to_test(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>>> settings_to_test) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_SETTINGS_TO_TEST, settings_to_test);
  }
  void add_model_file(flatbuffers::Offset<tflite::ModelFile> model_file) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_MODEL_FILE, model_file);
  }
  void add_storage_paths(flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths) {
    fbb_.AddOffset(MinibenchmarkSettings::VT_STORAGE_PATHS, storage_paths);
  }
  explicit MinibenchmarkSettingsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: builders are intentionally non-assignable.
  MinibenchmarkSettingsBuilder &operator=(const MinibenchmarkSettingsBuilder &);
  flatbuffers::Offset<MinibenchmarkSettings> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<MinibenchmarkSettings>(end);
    return o;
  }
};
2959 
// One-shot helper that builds a complete MinibenchmarkSettings table
// (fields added in reverse declaration order, per the generator's layout).
inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::TFLiteSettings>>> settings_to_test = 0,
    flatbuffers::Offset<tflite::ModelFile> model_file = 0,
    flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths = 0) {
  MinibenchmarkSettingsBuilder builder_(_fbb);
  builder_.add_storage_paths(storage_paths);
  builder_.add_model_file(model_file);
  builder_.add_settings_to_test(settings_to_test);
  return builder_.Finish();
}
2971 
// "Direct" variant: serializes the std::vector of offsets into an in-buffer
// vector first; a null vector is encoded as an absent field (offset 0).
inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettingsDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<flatbuffers::Offset<tflite::TFLiteSettings>> *settings_to_test = nullptr,
    flatbuffers::Offset<tflite::ModelFile> model_file = 0,
    flatbuffers::Offset<tflite::BenchmarkStoragePaths> storage_paths = 0) {
  auto settings_to_test__ = settings_to_test ? _fbb.CreateVector<flatbuffers::Offset<tflite::TFLiteSettings>>(*settings_to_test) : 0;
  return tflite::CreateMinibenchmarkSettings(
      _fbb,
      settings_to_test__,
      model_file,
      storage_paths);
}
2984 
2985 flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
2986 
2987 
// Deep equality for the native object: plain members compare by value;
// unique_ptr members compare equal when both are null or both point to
// payloads that compare equal.
inline bool operator==(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs) {
  return
      (lhs.preference == rhs.preference) &&
      ((lhs.tflite_settings == rhs.tflite_settings) || (lhs.tflite_settings && rhs.tflite_settings && *lhs.tflite_settings == *rhs.tflite_settings)) &&
      (lhs.model_namespace_for_statistics == rhs.model_namespace_for_statistics) &&
      (lhs.model_identifier_for_statistics == rhs.model_identifier_for_statistics) &&
      ((lhs.settings_to_test_locally == rhs.settings_to_test_locally) || (lhs.settings_to_test_locally && rhs.settings_to_test_locally && *lhs.settings_to_test_locally == *rhs.settings_to_test_locally));
}
2996 
// Negation of the deep equality operator.
inline bool operator!=(const ComputeSettingsT &lhs, const ComputeSettingsT &rhs) {
    return !(lhs == rhs);
}
3000 
3001 
// Allocates a native object and fills it from this table.
// Ownership of the returned raw pointer passes to the caller.
inline ComputeSettingsT *ComputeSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new ComputeSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}
3007 
// Field-by-field copy into *_o: scalars are assigned directly, strings are
// copied out of the buffer, and nested tables are recursively UnPack()ed.
// Absent optional fields leave the corresponding member at its default.
inline void ComputeSettings::UnPackTo(ComputeSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = preference(); _o->preference = _e; }
  { auto _e = tflite_settings(); if (_e) _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = model_namespace_for_statistics(); if (_e) _o->model_namespace_for_statistics = _e->str(); }
  { auto _e = model_identifier_for_statistics(); if (_e) _o->model_identifier_for_statistics = _e->str(); }
  { auto _e = settings_to_test_locally(); if (_e) _o->settings_to_test_locally = std::unique_ptr<tflite::MinibenchmarkSettingsT>(_e->UnPack(_resolver)); }
}
3017 
// Serializes a native object into _fbb; thin forward to CreateComputeSettings.
inline flatbuffers::Offset<ComputeSettings> ComputeSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateComputeSettings(_fbb, _o, _rehasher);
}
3021 
// Pack implementation: rebuilds every field of a ComputeSettings table from
// the native object. Empty strings and null sub-objects are encoded as
// absent fields (offset 0).
inline flatbuffers::Offset<ComputeSettings> CreateComputeSettings(flatbuffers::FlatBufferBuilder &_fbb, const ComputeSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generator scaffolding for vector-building lambdas; unused for this table.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ComputeSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _preference = _o->preference;
  auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
  auto _model_namespace_for_statistics = _o->model_namespace_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_namespace_for_statistics);
  auto _model_identifier_for_statistics = _o->model_identifier_for_statistics.empty() ? 0 : _fbb.CreateString(_o->model_identifier_for_statistics);
  auto _settings_to_test_locally = _o->settings_to_test_locally ? CreateMinibenchmarkSettings(_fbb, _o->settings_to_test_locally.get(), _rehasher) : 0;
  return tflite::CreateComputeSettings(
      _fbb,
      _preference,
      _tflite_settings,
      _model_namespace_for_statistics,
      _model_identifier_for_statistics,
      _settings_to_test_locally);
}
3039 
3040 
// Deep equality for the native object: plain members compare by value; the
// unique_ptr member compares equal when both sides are null or both point
// to payloads that compare equal.
inline bool operator==(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs) {
  return
      (lhs.accelerator_name == rhs.accelerator_name) &&
      (lhs.cache_directory == rhs.cache_directory) &&
      (lhs.model_token == rhs.model_token) &&
      (lhs.execution_preference == rhs.execution_preference) &&
      (lhs.no_of_nnapi_instances_to_cache == rhs.no_of_nnapi_instances_to_cache) &&
      ((lhs.fallback_settings == rhs.fallback_settings) || (lhs.fallback_settings && rhs.fallback_settings && *lhs.fallback_settings == *rhs.fallback_settings)) &&
      (lhs.allow_nnapi_cpu_on_android_10_plus == rhs.allow_nnapi_cpu_on_android_10_plus) &&
      (lhs.execution_priority == rhs.execution_priority) &&
      (lhs.allow_dynamic_dimensions == rhs.allow_dynamic_dimensions) &&
      (lhs.allow_fp16_precision_for_fp32 == rhs.allow_fp16_precision_for_fp32) &&
      (lhs.use_burst_computation == rhs.use_burst_computation);
}
3055 
// Negation of the deep equality operator.
inline bool operator!=(const NNAPISettingsT &lhs, const NNAPISettingsT &rhs) {
    return !(lhs == rhs);
}
3059 
3060 
// Allocates a native object and fills it from this table.
// Ownership of the returned raw pointer passes to the caller.
inline NNAPISettingsT *NNAPISettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new NNAPISettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}
3066 
// Field-by-field copy into *_o: scalars are assigned directly, strings are
// copied out of the buffer, and the nested fallback_settings table is
// recursively UnPack()ed. Absent strings leave the members empty.
inline void NNAPISettings::UnPackTo(NNAPISettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;
  (void)_resolver;
  { auto _e = accelerator_name(); if (_e) _o->accelerator_name = _e->str(); }
  { auto _e = cache_directory(); if (_e) _o->cache_directory = _e->str(); }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
  { auto _e = execution_preference(); _o->execution_preference = _e; }
  { auto _e = no_of_nnapi_instances_to_cache(); _o->no_of_nnapi_instances_to_cache = _e; }
  { auto _e = fallback_settings(); if (_e) _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = allow_nnapi_cpu_on_android_10_plus(); _o->allow_nnapi_cpu_on_android_10_plus = _e; }
  { auto _e = execution_priority(); _o->execution_priority = _e; }
  { auto _e = allow_dynamic_dimensions(); _o->allow_dynamic_dimensions = _e; }
  { auto _e = allow_fp16_precision_for_fp32(); _o->allow_fp16_precision_for_fp32 = _e; }
  { auto _e = use_burst_computation(); _o->use_burst_computation = _e; }
}
3082 
// Serializes a native object into _fbb; thin forward to CreateNNAPISettings.
inline flatbuffers::Offset<NNAPISettings> NNAPISettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateNNAPISettings(_fbb, _o, _rehasher);
}
3086 
CreateNNAPISettings(flatbuffers::FlatBufferBuilder & _fbb,const NNAPISettingsT * _o,const flatbuffers::rehasher_function_t * _rehasher)3087 inline flatbuffers::Offset<NNAPISettings> CreateNNAPISettings(flatbuffers::FlatBufferBuilder &_fbb, const NNAPISettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3088   (void)_rehasher;
3089   (void)_o;
3090   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NNAPISettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3091   auto _accelerator_name = _o->accelerator_name.empty() ? 0 : _fbb.CreateString(_o->accelerator_name);
3092   auto _cache_directory = _o->cache_directory.empty() ? 0 : _fbb.CreateString(_o->cache_directory);
3093   auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
3094   auto _execution_preference = _o->execution_preference;
3095   auto _no_of_nnapi_instances_to_cache = _o->no_of_nnapi_instances_to_cache;
3096   auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
3097   auto _allow_nnapi_cpu_on_android_10_plus = _o->allow_nnapi_cpu_on_android_10_plus;
3098   auto _execution_priority = _o->execution_priority;
3099   auto _allow_dynamic_dimensions = _o->allow_dynamic_dimensions;
3100   auto _allow_fp16_precision_for_fp32 = _o->allow_fp16_precision_for_fp32;
3101   auto _use_burst_computation = _o->use_burst_computation;
3102   return tflite::CreateNNAPISettings(
3103       _fbb,
3104       _accelerator_name,
3105       _cache_directory,
3106       _model_token,
3107       _execution_preference,
3108       _no_of_nnapi_instances_to_cache,
3109       _fallback_settings,
3110       _allow_nnapi_cpu_on_android_10_plus,
3111       _execution_priority,
3112       _allow_dynamic_dimensions,
3113       _allow_fp16_precision_for_fp32,
3114       _use_burst_computation);
3115 }
3116 
3117 
// Field-wise equality for the GPUSettings object API.
inline bool operator==(const GPUSettingsT &lhs, const GPUSettingsT &rhs) {
  return
      (lhs.is_precision_loss_allowed == rhs.is_precision_loss_allowed) &&
      (lhs.enable_quantized_inference == rhs.enable_quantized_inference) &&
      (lhs.force_backend == rhs.force_backend) &&
      (lhs.inference_priority1 == rhs.inference_priority1) &&
      (lhs.inference_priority2 == rhs.inference_priority2) &&
      (lhs.inference_priority3 == rhs.inference_priority3) &&
      (lhs.inference_preference == rhs.inference_preference) &&
      (lhs.cache_directory == rhs.cache_directory) &&
      (lhs.model_token == rhs.model_token);
}

// Inequality in terms of operator== above.
inline bool operator!=(const GPUSettingsT &lhs, const GPUSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline GPUSettingsT *GPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new GPUSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each field of this table into `_o`; string fields are assigned only
// when present in the buffer.
inline void GPUSettings::UnPackTo(GPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = is_precision_loss_allowed(); _o->is_precision_loss_allowed = _e; }
  { auto _e = enable_quantized_inference(); _o->enable_quantized_inference = _e; }
  { auto _e = force_backend(); _o->force_backend = _e; }
  { auto _e = inference_priority1(); _o->inference_priority1 = _e; }
  { auto _e = inference_priority2(); _o->inference_priority2 = _e; }
  { auto _e = inference_priority3(); _o->inference_priority3 = _e; }
  { auto _e = inference_preference(); _o->inference_preference = _e; }
  { auto _e = cache_directory(); if (_e) _o->cache_directory = _e->str(); }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
}

// Thin wrapper delegating to the free CreateGPUSettings helper below.
inline flatbuffers::Offset<GPUSettings> GPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateGPUSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer; empty strings serialize as absent
// fields (offset 0).
inline flatbuffers::Offset<GPUSettings> CreateGPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const GPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _is_precision_loss_allowed = _o->is_precision_loss_allowed;
  auto _enable_quantized_inference = _o->enable_quantized_inference;
  auto _force_backend = _o->force_backend;
  auto _inference_priority1 = _o->inference_priority1;
  auto _inference_priority2 = _o->inference_priority2;
  auto _inference_priority3 = _o->inference_priority3;
  auto _inference_preference = _o->inference_preference;
  auto _cache_directory = _o->cache_directory.empty() ? 0 : _fbb.CreateString(_o->cache_directory);
  auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
  return tflite::CreateGPUSettings(
      _fbb,
      _is_precision_loss_allowed,
      _enable_quantized_inference,
      _force_backend,
      _inference_priority1,
      _inference_priority2,
      _inference_priority3,
      _inference_preference,
      _cache_directory,
      _model_token);
}
3185 
3186 
// Field-wise equality for the HexagonSettings object API.
inline bool operator==(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs) {
  return
      (lhs.debug_level == rhs.debug_level) &&
      (lhs.powersave_level == rhs.powersave_level) &&
      (lhs.print_graph_profile == rhs.print_graph_profile) &&
      (lhs.print_graph_debug == rhs.print_graph_debug);
}

// Inequality in terms of operator== above.
inline bool operator!=(const HexagonSettingsT &lhs, const HexagonSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline HexagonSettingsT *HexagonSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new HexagonSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each scalar field of this table into `_o`.
inline void HexagonSettings::UnPackTo(HexagonSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = debug_level(); _o->debug_level = _e; }
  { auto _e = powersave_level(); _o->powersave_level = _e; }
  { auto _e = print_graph_profile(); _o->print_graph_profile = _e; }
  { auto _e = print_graph_debug(); _o->print_graph_debug = _e; }
}

// Thin wrapper delegating to the free CreateHexagonSettings helper below.
inline flatbuffers::Offset<HexagonSettings> HexagonSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateHexagonSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer.
inline flatbuffers::Offset<HexagonSettings> CreateHexagonSettings(flatbuffers::FlatBufferBuilder &_fbb, const HexagonSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HexagonSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _debug_level = _o->debug_level;
  auto _powersave_level = _o->powersave_level;
  auto _print_graph_profile = _o->print_graph_profile;
  auto _print_graph_debug = _o->print_graph_debug;
  return tflite::CreateHexagonSettings(
      _fbb,
      _debug_level,
      _powersave_level,
      _print_graph_profile,
      _print_graph_debug);
}
3234 
3235 
// Field-wise equality for the XNNPackSettings object API.
inline bool operator==(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs) {
  return
      (lhs.num_threads == rhs.num_threads);
}

// Inequality in terms of operator== above.
inline bool operator!=(const XNNPackSettingsT &lhs, const XNNPackSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline XNNPackSettingsT *XNNPackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new XNNPackSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies the single scalar field of this table into `_o`.
inline void XNNPackSettings::UnPackTo(XNNPackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = num_threads(); _o->num_threads = _e; }
}

// Thin wrapper delegating to the free CreateXNNPackSettings helper below.
inline flatbuffers::Offset<XNNPackSettings> XNNPackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateXNNPackSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer.
inline flatbuffers::Offset<XNNPackSettings> CreateXNNPackSettings(flatbuffers::FlatBufferBuilder &_fbb, const XNNPackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const XNNPackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _num_threads = _o->num_threads;
  return tflite::CreateXNNPackSettings(
      _fbb,
      _num_threads);
}
3271 
3272 
// Field-wise equality for the EdgeTpuDeviceSpec object API (device_paths is
// compared element-wise via std::vector's operator==).
inline bool operator==(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs) {
  return
      (lhs.platform_type == rhs.platform_type) &&
      (lhs.num_chips == rhs.num_chips) &&
      (lhs.device_paths == rhs.device_paths) &&
      (lhs.chip_family == rhs.chip_family);
}

// Inequality in terms of operator== above.
inline bool operator!=(const EdgeTpuDeviceSpecT &lhs, const EdgeTpuDeviceSpecT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline EdgeTpuDeviceSpecT *EdgeTpuDeviceSpec::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new EdgeTpuDeviceSpecT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each field of this table into `_o`; the device_paths string vector
// is copied element by element when present.
inline void EdgeTpuDeviceSpec::UnPackTo(EdgeTpuDeviceSpecT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = platform_type(); _o->platform_type = _e; }
  { auto _e = num_chips(); _o->num_chips = _e; }
  { auto _e = device_paths(); if (_e) { _o->device_paths.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->device_paths[_i] = _e->Get(_i)->str(); } } }
  { auto _e = chip_family(); _o->chip_family = _e; }
}

// Thin wrapper delegating to the free CreateEdgeTpuDeviceSpec helper below.
inline flatbuffers::Offset<EdgeTpuDeviceSpec> EdgeTpuDeviceSpec::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEdgeTpuDeviceSpec(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer; an empty device_paths vector
// serializes as an absent field (offset 0).
inline flatbuffers::Offset<EdgeTpuDeviceSpec> CreateEdgeTpuDeviceSpec(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuDeviceSpecT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuDeviceSpecT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _platform_type = _o->platform_type;
  auto _num_chips = _o->num_chips;
  auto _device_paths = _o->device_paths.size() ? _fbb.CreateVectorOfStrings(_o->device_paths) : 0;
  auto _chip_family = _o->chip_family;
  return tflite::CreateEdgeTpuDeviceSpec(
      _fbb,
      _platform_type,
      _num_chips,
      _device_paths,
      _chip_family);
}
3320 
3321 
// Field-wise equality for the EdgeTpuInactivePowerConfig object API.
inline bool operator==(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs) {
  return
      (lhs.inactive_power_state == rhs.inactive_power_state) &&
      (lhs.inactive_timeout_us == rhs.inactive_timeout_us);
}

// Inequality in terms of operator== above.
inline bool operator!=(const EdgeTpuInactivePowerConfigT &lhs, const EdgeTpuInactivePowerConfigT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline EdgeTpuInactivePowerConfigT *EdgeTpuInactivePowerConfig::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new EdgeTpuInactivePowerConfigT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each scalar field of this table into `_o`.
inline void EdgeTpuInactivePowerConfig::UnPackTo(EdgeTpuInactivePowerConfigT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = inactive_power_state(); _o->inactive_power_state = _e; }
  { auto _e = inactive_timeout_us(); _o->inactive_timeout_us = _e; }
}

// Thin wrapper delegating to the free CreateEdgeTpuInactivePowerConfig helper.
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> EdgeTpuInactivePowerConfig::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEdgeTpuInactivePowerConfig(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer.
inline flatbuffers::Offset<EdgeTpuInactivePowerConfig> CreateEdgeTpuInactivePowerConfig(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuInactivePowerConfigT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuInactivePowerConfigT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _inactive_power_state = _o->inactive_power_state;
  auto _inactive_timeout_us = _o->inactive_timeout_us;
  return tflite::CreateEdgeTpuInactivePowerConfig(
      _fbb,
      _inactive_power_state,
      _inactive_timeout_us);
}
3361 
3362 
// Field-wise equality for the EdgeTpuSettings object API. The unique_ptr
// sub-table field compares equal when both pointers are equal (covers the
// both-null case) or both are non-null and the pointees compare equal.
inline bool operator==(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs) {
  return
      (lhs.inference_power_state == rhs.inference_power_state) &&
      (lhs.inactive_power_configs == rhs.inactive_power_configs) &&
      (lhs.inference_priority == rhs.inference_priority) &&
      ((lhs.edgetpu_device_spec == rhs.edgetpu_device_spec) || (lhs.edgetpu_device_spec && rhs.edgetpu_device_spec && *lhs.edgetpu_device_spec == *rhs.edgetpu_device_spec)) &&
      (lhs.model_token == rhs.model_token) &&
      (lhs.float_truncation_type == rhs.float_truncation_type);
}

// Inequality in terms of operator== above.
inline bool operator!=(const EdgeTpuSettingsT &lhs, const EdgeTpuSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline EdgeTpuSettingsT *EdgeTpuSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new EdgeTpuSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each field of this table into `_o`; the inactive_power_configs
// vector and the edgetpu_device_spec sub-table are recursively unpacked into
// unique_ptrs.
inline void EdgeTpuSettings::UnPackTo(EdgeTpuSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = inference_power_state(); _o->inference_power_state = _e; }
  { auto _e = inactive_power_configs(); if (_e) { _o->inactive_power_configs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inactive_power_configs[_i] = std::unique_ptr<tflite::EdgeTpuInactivePowerConfigT>(_e->Get(_i)->UnPack(_resolver)); } } }
  { auto _e = inference_priority(); _o->inference_priority = _e; }
  { auto _e = edgetpu_device_spec(); if (_e) _o->edgetpu_device_spec = std::unique_ptr<tflite::EdgeTpuDeviceSpecT>(_e->UnPack(_resolver)); }
  { auto _e = model_token(); if (_e) _o->model_token = _e->str(); }
  { auto _e = float_truncation_type(); _o->float_truncation_type = _e; }
}

// Thin wrapper delegating to the free CreateEdgeTpuSettings helper below.
inline flatbuffers::Offset<EdgeTpuSettings> EdgeTpuSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEdgeTpuSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer. Empty strings/vectors and unset
// sub-tables serialize as absent fields (offset 0); the vector of sub-tables
// is built via a lambda that re-packs each element.
inline flatbuffers::Offset<EdgeTpuSettings> CreateEdgeTpuSettings(flatbuffers::FlatBufferBuilder &_fbb, const EdgeTpuSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Carries builder/object/rehasher into the stateless vector lambda below.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EdgeTpuSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _inference_power_state = _o->inference_power_state;
  auto _inactive_power_configs = _o->inactive_power_configs.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::EdgeTpuInactivePowerConfig>> (_o->inactive_power_configs.size(), [](size_t i, _VectorArgs *__va) { return CreateEdgeTpuInactivePowerConfig(*__va->__fbb, __va->__o->inactive_power_configs[i].get(), __va->__rehasher); }, &_va ) : 0;
  auto _inference_priority = _o->inference_priority;
  auto _edgetpu_device_spec = _o->edgetpu_device_spec ? CreateEdgeTpuDeviceSpec(_fbb, _o->edgetpu_device_spec.get(), _rehasher) : 0;
  auto _model_token = _o->model_token.empty() ? 0 : _fbb.CreateString(_o->model_token);
  auto _float_truncation_type = _o->float_truncation_type;
  return tflite::CreateEdgeTpuSettings(
      _fbb,
      _inference_power_state,
      _inactive_power_configs,
      _inference_priority,
      _edgetpu_device_spec,
      _model_token,
      _float_truncation_type);
}
3418 
3419 
// Field-wise equality for the CoralSettings object API.
inline bool operator==(const CoralSettingsT &lhs, const CoralSettingsT &rhs) {
  return
      (lhs.device == rhs.device) &&
      (lhs.performance == rhs.performance) &&
      (lhs.usb_always_dfu == rhs.usb_always_dfu) &&
      (lhs.usb_max_bulk_in_queue_length == rhs.usb_max_bulk_in_queue_length);
}

// Inequality in terms of operator== above.
inline bool operator!=(const CoralSettingsT &lhs, const CoralSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline CoralSettingsT *CoralSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new CoralSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each field of this table into `_o`; the device string is assigned
// only when present in the buffer.
inline void CoralSettings::UnPackTo(CoralSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = device(); if (_e) _o->device = _e->str(); }
  { auto _e = performance(); _o->performance = _e; }
  { auto _e = usb_always_dfu(); _o->usb_always_dfu = _e; }
  { auto _e = usb_max_bulk_in_queue_length(); _o->usb_max_bulk_in_queue_length = _e; }
}

// Thin wrapper delegating to the free CreateCoralSettings helper below.
inline flatbuffers::Offset<CoralSettings> CoralSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCoralSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer; an empty device string serializes
// as an absent field (offset 0).
inline flatbuffers::Offset<CoralSettings> CreateCoralSettings(flatbuffers::FlatBufferBuilder &_fbb, const CoralSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CoralSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _device = _o->device.empty() ? 0 : _fbb.CreateString(_o->device);
  auto _performance = _o->performance;
  auto _usb_always_dfu = _o->usb_always_dfu;
  auto _usb_max_bulk_in_queue_length = _o->usb_max_bulk_in_queue_length;
  return tflite::CreateCoralSettings(
      _fbb,
      _device,
      _performance,
      _usb_always_dfu,
      _usb_max_bulk_in_queue_length);
}
3467 
3468 
// Field-wise equality for the CPUSettings object API.
inline bool operator==(const CPUSettingsT &lhs, const CPUSettingsT &rhs) {
  return
      (lhs.num_threads == rhs.num_threads);
}

// Inequality in terms of operator== above.
inline bool operator!=(const CPUSettingsT &lhs, const CPUSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline CPUSettingsT *CPUSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new CPUSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies the single scalar field of this table into `_o`.
inline void CPUSettings::UnPackTo(CPUSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = num_threads(); _o->num_threads = _e; }
}

// Thin wrapper delegating to the free CreateCPUSettings helper below.
inline flatbuffers::Offset<CPUSettings> CPUSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCPUSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer.
inline flatbuffers::Offset<CPUSettings> CreateCPUSettings(flatbuffers::FlatBufferBuilder &_fbb, const CPUSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CPUSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _num_threads = _o->num_threads;
  return tflite::CreateCPUSettings(
      _fbb,
      _num_threads);
}
3504 
3505 
// Field-wise equality for the TFLiteSettings object API. Each unique_ptr
// sub-table field compares equal when both pointers are equal (covers the
// both-null case) or both are non-null and the pointees compare equal.
inline bool operator==(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs) {
  return
      (lhs.delegate == rhs.delegate) &&
      ((lhs.nnapi_settings == rhs.nnapi_settings) || (lhs.nnapi_settings && rhs.nnapi_settings && *lhs.nnapi_settings == *rhs.nnapi_settings)) &&
      ((lhs.gpu_settings == rhs.gpu_settings) || (lhs.gpu_settings && rhs.gpu_settings && *lhs.gpu_settings == *rhs.gpu_settings)) &&
      ((lhs.hexagon_settings == rhs.hexagon_settings) || (lhs.hexagon_settings && rhs.hexagon_settings && *lhs.hexagon_settings == *rhs.hexagon_settings)) &&
      ((lhs.xnnpack_settings == rhs.xnnpack_settings) || (lhs.xnnpack_settings && rhs.xnnpack_settings && *lhs.xnnpack_settings == *rhs.xnnpack_settings)) &&
      ((lhs.cpu_settings == rhs.cpu_settings) || (lhs.cpu_settings && rhs.cpu_settings && *lhs.cpu_settings == *rhs.cpu_settings)) &&
      (lhs.max_delegated_partitions == rhs.max_delegated_partitions) &&
      ((lhs.edgetpu_settings == rhs.edgetpu_settings) || (lhs.edgetpu_settings && rhs.edgetpu_settings && *lhs.edgetpu_settings == *rhs.edgetpu_settings)) &&
      ((lhs.coral_settings == rhs.coral_settings) || (lhs.coral_settings && rhs.coral_settings && *lhs.coral_settings == *rhs.coral_settings)) &&
      ((lhs.fallback_settings == rhs.fallback_settings) || (lhs.fallback_settings && rhs.fallback_settings && *lhs.fallback_settings == *rhs.fallback_settings));
}

// Inequality in terms of operator== above.
inline bool operator!=(const TFLiteSettingsT &lhs, const TFLiteSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline TFLiteSettingsT *TFLiteSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new TFLiteSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each field of this table into `_o`; every per-delegate sub-table is
// recursively unpacked into a unique_ptr when present.
inline void TFLiteSettings::UnPackTo(TFLiteSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = delegate(); _o->delegate = _e; }
  { auto _e = nnapi_settings(); if (_e) _o->nnapi_settings = std::unique_ptr<tflite::NNAPISettingsT>(_e->UnPack(_resolver)); }
  { auto _e = gpu_settings(); if (_e) _o->gpu_settings = std::unique_ptr<tflite::GPUSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = hexagon_settings(); if (_e) _o->hexagon_settings = std::unique_ptr<tflite::HexagonSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = xnnpack_settings(); if (_e) _o->xnnpack_settings = std::unique_ptr<tflite::XNNPackSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = cpu_settings(); if (_e) _o->cpu_settings = std::unique_ptr<tflite::CPUSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = max_delegated_partitions(); _o->max_delegated_partitions = _e; }
  { auto _e = edgetpu_settings(); if (_e) _o->edgetpu_settings = std::unique_ptr<tflite::EdgeTpuSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = coral_settings(); if (_e) _o->coral_settings = std::unique_ptr<tflite::CoralSettingsT>(_e->UnPack(_resolver)); }
  { auto _e = fallback_settings(); if (_e) _o->fallback_settings = std::unique_ptr<tflite::FallbackSettingsT>(_e->UnPack(_resolver)); }
}

// Thin wrapper delegating to the free CreateTFLiteSettings helper below.
inline flatbuffers::Offset<TFLiteSettings> TFLiteSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateTFLiteSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer; unset sub-tables serialize as
// absent fields (offset 0).
inline flatbuffers::Offset<TFLiteSettings> CreateTFLiteSettings(flatbuffers::FlatBufferBuilder &_fbb, const TFLiteSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TFLiteSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _delegate = _o->delegate;
  auto _nnapi_settings = _o->nnapi_settings ? CreateNNAPISettings(_fbb, _o->nnapi_settings.get(), _rehasher) : 0;
  auto _gpu_settings = _o->gpu_settings ? CreateGPUSettings(_fbb, _o->gpu_settings.get(), _rehasher) : 0;
  auto _hexagon_settings = _o->hexagon_settings ? CreateHexagonSettings(_fbb, _o->hexagon_settings.get(), _rehasher) : 0;
  auto _xnnpack_settings = _o->xnnpack_settings ? CreateXNNPackSettings(_fbb, _o->xnnpack_settings.get(), _rehasher) : 0;
  auto _cpu_settings = _o->cpu_settings ? CreateCPUSettings(_fbb, _o->cpu_settings.get(), _rehasher) : 0;
  auto _max_delegated_partitions = _o->max_delegated_partitions;
  auto _edgetpu_settings = _o->edgetpu_settings ? CreateEdgeTpuSettings(_fbb, _o->edgetpu_settings.get(), _rehasher) : 0;
  auto _coral_settings = _o->coral_settings ? CreateCoralSettings(_fbb, _o->coral_settings.get(), _rehasher) : 0;
  auto _fallback_settings = _o->fallback_settings ? CreateFallbackSettings(_fbb, _o->fallback_settings.get(), _rehasher) : 0;
  return tflite::CreateTFLiteSettings(
      _fbb,
      _delegate,
      _nnapi_settings,
      _gpu_settings,
      _hexagon_settings,
      _xnnpack_settings,
      _cpu_settings,
      _max_delegated_partitions,
      _edgetpu_settings,
      _coral_settings,
      _fallback_settings);
}
3577 
3578 
// Field-wise equality for the FallbackSettings object API.
inline bool operator==(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs) {
  return
      (lhs.allow_automatic_fallback_on_compilation_error == rhs.allow_automatic_fallback_on_compilation_error) &&
      (lhs.allow_automatic_fallback_on_execution_error == rhs.allow_automatic_fallback_on_execution_error);
}

// Inequality in terms of operator== above.
inline bool operator!=(const FallbackSettingsT &lhs, const FallbackSettingsT &rhs) {
    return !(lhs == rhs);
}

// Unpacks this table into a newly heap-allocated object-API struct; caller
// owns the returned raw pointer.
inline FallbackSettingsT *FallbackSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
  auto _o = new FallbackSettingsT();
  UnPackTo(_o, _resolver);
  return _o;
}

// Copies each scalar field of this table into `_o`.
inline void FallbackSettings::UnPackTo(FallbackSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
  (void)_o;  // suppress unused warnings for tables with no fields
  (void)_resolver;
  { auto _e = allow_automatic_fallback_on_compilation_error(); _o->allow_automatic_fallback_on_compilation_error = _e; }
  { auto _e = allow_automatic_fallback_on_execution_error(); _o->allow_automatic_fallback_on_execution_error = _e; }
}

// Thin wrapper delegating to the free CreateFallbackSettings helper below.
inline flatbuffers::Offset<FallbackSettings> FallbackSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
  return CreateFallbackSettings(_fbb, _o, _rehasher);
}

// Serializes `_o` back into a flatbuffer.
inline flatbuffers::Offset<FallbackSettings> CreateFallbackSettings(flatbuffers::FlatBufferBuilder &_fbb, const FallbackSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher;
  (void)_o;
  // Generated boilerplate for vector-field lambdas; unused for this type.
  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FallbackSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _allow_automatic_fallback_on_compilation_error = _o->allow_automatic_fallback_on_compilation_error;
  auto _allow_automatic_fallback_on_execution_error = _o->allow_automatic_fallback_on_execution_error;
  return tflite::CreateFallbackSettings(
      _fbb,
      _allow_automatic_fallback_on_compilation_error,
      _allow_automatic_fallback_on_execution_error);
}
3618 
3619 
3620 inline bool operator==(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs) {
3621   return
3622       (lhs.name == rhs.name) &&
3623       (lhs.values == rhs.values);
3624 }
3625 
3626 inline bool operator!=(const BenchmarkMetricT &lhs, const BenchmarkMetricT &rhs) {
3627     return !(lhs == rhs);
3628 }
3629 
3630 
UnPack(const flatbuffers::resolver_function_t * _resolver)3631 inline BenchmarkMetricT *BenchmarkMetric::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3632   auto _o = new BenchmarkMetricT();
3633   UnPackTo(_o, _resolver);
3634   return _o;
3635 }
3636 
UnPackTo(BenchmarkMetricT * _o,const flatbuffers::resolver_function_t * _resolver)3637 inline void BenchmarkMetric::UnPackTo(BenchmarkMetricT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3638   (void)_o;
3639   (void)_resolver;
3640   { auto _e = name(); if (_e) _o->name = _e->str(); }
3641   { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } }
3642 }
3643 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkMetricT * _o,const flatbuffers::rehasher_function_t * _rehasher)3644 inline flatbuffers::Offset<BenchmarkMetric> BenchmarkMetric::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3645   return CreateBenchmarkMetric(_fbb, _o, _rehasher);
3646 }
3647 
CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkMetricT * _o,const flatbuffers::rehasher_function_t * _rehasher)3648 inline flatbuffers::Offset<BenchmarkMetric> CreateBenchmarkMetric(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkMetricT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3649   (void)_rehasher;
3650   (void)_o;
3651   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkMetricT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3652   auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
3653   auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
3654   return tflite::CreateBenchmarkMetric(
3655       _fbb,
3656       _name,
3657       _values);
3658 }
3659 
3660 
3661 inline bool operator==(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs) {
3662   return
3663       (lhs.initialization_time_us == rhs.initialization_time_us) &&
3664       (lhs.inference_time_us == rhs.inference_time_us) &&
3665       (lhs.max_memory_kb == rhs.max_memory_kb) &&
3666       (lhs.ok == rhs.ok) &&
3667       (lhs.metrics == rhs.metrics);
3668 }
3669 
3670 inline bool operator!=(const BenchmarkResultT &lhs, const BenchmarkResultT &rhs) {
3671     return !(lhs == rhs);
3672 }
3673 
3674 
UnPack(const flatbuffers::resolver_function_t * _resolver)3675 inline BenchmarkResultT *BenchmarkResult::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3676   auto _o = new BenchmarkResultT();
3677   UnPackTo(_o, _resolver);
3678   return _o;
3679 }
3680 
UnPackTo(BenchmarkResultT * _o,const flatbuffers::resolver_function_t * _resolver)3681 inline void BenchmarkResult::UnPackTo(BenchmarkResultT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3682   (void)_o;
3683   (void)_resolver;
3684   { auto _e = initialization_time_us(); if (_e) { _o->initialization_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->initialization_time_us[_i] = _e->Get(_i); } } }
3685   { auto _e = inference_time_us(); if (_e) { _o->inference_time_us.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inference_time_us[_i] = _e->Get(_i); } } }
3686   { auto _e = max_memory_kb(); _o->max_memory_kb = _e; }
3687   { auto _e = ok(); _o->ok = _e; }
3688   { auto _e = metrics(); if (_e) { _o->metrics.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->metrics[_i] = std::unique_ptr<tflite::BenchmarkMetricT>(_e->Get(_i)->UnPack(_resolver)); } } }
3689 }
3690 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkResultT * _o,const flatbuffers::rehasher_function_t * _rehasher)3691 inline flatbuffers::Offset<BenchmarkResult> BenchmarkResult::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3692   return CreateBenchmarkResult(_fbb, _o, _rehasher);
3693 }
3694 
CreateBenchmarkResult(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkResultT * _o,const flatbuffers::rehasher_function_t * _rehasher)3695 inline flatbuffers::Offset<BenchmarkResult> CreateBenchmarkResult(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkResultT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3696   (void)_rehasher;
3697   (void)_o;
3698   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkResultT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3699   auto _initialization_time_us = _o->initialization_time_us.size() ? _fbb.CreateVector(_o->initialization_time_us) : 0;
3700   auto _inference_time_us = _o->inference_time_us.size() ? _fbb.CreateVector(_o->inference_time_us) : 0;
3701   auto _max_memory_kb = _o->max_memory_kb;
3702   auto _ok = _o->ok;
3703   auto _metrics = _o->metrics.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::BenchmarkMetric>> (_o->metrics.size(), [](size_t i, _VectorArgs *__va) { return CreateBenchmarkMetric(*__va->__fbb, __va->__o->metrics[i].get(), __va->__rehasher); }, &_va ) : 0;
3704   return tflite::CreateBenchmarkResult(
3705       _fbb,
3706       _initialization_time_us,
3707       _inference_time_us,
3708       _max_memory_kb,
3709       _ok,
3710       _metrics);
3711 }
3712 
3713 
3714 inline bool operator==(const ErrorCodeT &lhs, const ErrorCodeT &rhs) {
3715   return
3716       (lhs.source == rhs.source) &&
3717       (lhs.tflite_error == rhs.tflite_error) &&
3718       (lhs.underlying_api_error == rhs.underlying_api_error);
3719 }
3720 
3721 inline bool operator!=(const ErrorCodeT &lhs, const ErrorCodeT &rhs) {
3722     return !(lhs == rhs);
3723 }
3724 
3725 
UnPack(const flatbuffers::resolver_function_t * _resolver)3726 inline ErrorCodeT *ErrorCode::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3727   auto _o = new ErrorCodeT();
3728   UnPackTo(_o, _resolver);
3729   return _o;
3730 }
3731 
UnPackTo(ErrorCodeT * _o,const flatbuffers::resolver_function_t * _resolver)3732 inline void ErrorCode::UnPackTo(ErrorCodeT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3733   (void)_o;
3734   (void)_resolver;
3735   { auto _e = source(); _o->source = _e; }
3736   { auto _e = tflite_error(); _o->tflite_error = _e; }
3737   { auto _e = underlying_api_error(); _o->underlying_api_error = _e; }
3738 }
3739 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const ErrorCodeT * _o,const flatbuffers::rehasher_function_t * _rehasher)3740 inline flatbuffers::Offset<ErrorCode> ErrorCode::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3741   return CreateErrorCode(_fbb, _o, _rehasher);
3742 }
3743 
CreateErrorCode(flatbuffers::FlatBufferBuilder & _fbb,const ErrorCodeT * _o,const flatbuffers::rehasher_function_t * _rehasher)3744 inline flatbuffers::Offset<ErrorCode> CreateErrorCode(flatbuffers::FlatBufferBuilder &_fbb, const ErrorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3745   (void)_rehasher;
3746   (void)_o;
3747   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ErrorCodeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3748   auto _source = _o->source;
3749   auto _tflite_error = _o->tflite_error;
3750   auto _underlying_api_error = _o->underlying_api_error;
3751   return tflite::CreateErrorCode(
3752       _fbb,
3753       _source,
3754       _tflite_error,
3755       _underlying_api_error);
3756 }
3757 
3758 
3759 inline bool operator==(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs) {
3760   return
3761       (lhs.stage == rhs.stage) &&
3762       (lhs.exit_code == rhs.exit_code) &&
3763       (lhs.signal == rhs.signal) &&
3764       (lhs.error_code == rhs.error_code) &&
3765       (lhs.mini_benchmark_error_code == rhs.mini_benchmark_error_code);
3766 }
3767 
3768 inline bool operator!=(const BenchmarkErrorT &lhs, const BenchmarkErrorT &rhs) {
3769     return !(lhs == rhs);
3770 }
3771 
3772 
UnPack(const flatbuffers::resolver_function_t * _resolver)3773 inline BenchmarkErrorT *BenchmarkError::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3774   auto _o = new BenchmarkErrorT();
3775   UnPackTo(_o, _resolver);
3776   return _o;
3777 }
3778 
UnPackTo(BenchmarkErrorT * _o,const flatbuffers::resolver_function_t * _resolver)3779 inline void BenchmarkError::UnPackTo(BenchmarkErrorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3780   (void)_o;
3781   (void)_resolver;
3782   { auto _e = stage(); _o->stage = _e; }
3783   { auto _e = exit_code(); _o->exit_code = _e; }
3784   { auto _e = signal(); _o->signal = _e; }
3785   { auto _e = error_code(); if (_e) { _o->error_code.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->error_code[_i] = std::unique_ptr<tflite::ErrorCodeT>(_e->Get(_i)->UnPack(_resolver)); } } }
3786   { auto _e = mini_benchmark_error_code(); _o->mini_benchmark_error_code = _e; }
3787 }
3788 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkErrorT * _o,const flatbuffers::rehasher_function_t * _rehasher)3789 inline flatbuffers::Offset<BenchmarkError> BenchmarkError::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3790   return CreateBenchmarkError(_fbb, _o, _rehasher);
3791 }
3792 
CreateBenchmarkError(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkErrorT * _o,const flatbuffers::rehasher_function_t * _rehasher)3793 inline flatbuffers::Offset<BenchmarkError> CreateBenchmarkError(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkErrorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3794   (void)_rehasher;
3795   (void)_o;
3796   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkErrorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3797   auto _stage = _o->stage;
3798   auto _exit_code = _o->exit_code;
3799   auto _signal = _o->signal;
3800   auto _error_code = _o->error_code.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::ErrorCode>> (_o->error_code.size(), [](size_t i, _VectorArgs *__va) { return CreateErrorCode(*__va->__fbb, __va->__o->error_code[i].get(), __va->__rehasher); }, &_va ) : 0;
3801   auto _mini_benchmark_error_code = _o->mini_benchmark_error_code;
3802   return tflite::CreateBenchmarkError(
3803       _fbb,
3804       _stage,
3805       _exit_code,
3806       _signal,
3807       _error_code,
3808       _mini_benchmark_error_code);
3809 }
3810 
3811 
3812 inline bool operator==(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs) {
3813   return
3814       ((lhs.tflite_settings == rhs.tflite_settings) || (lhs.tflite_settings && rhs.tflite_settings && *lhs.tflite_settings == *rhs.tflite_settings)) &&
3815       (lhs.event_type == rhs.event_type) &&
3816       ((lhs.result == rhs.result) || (lhs.result && rhs.result && *lhs.result == *rhs.result)) &&
3817       ((lhs.error == rhs.error) || (lhs.error && rhs.error && *lhs.error == *rhs.error)) &&
3818       (lhs.boottime_us == rhs.boottime_us) &&
3819       (lhs.wallclock_us == rhs.wallclock_us);
3820 }
3821 
3822 inline bool operator!=(const BenchmarkEventT &lhs, const BenchmarkEventT &rhs) {
3823     return !(lhs == rhs);
3824 }
3825 
3826 
UnPack(const flatbuffers::resolver_function_t * _resolver)3827 inline BenchmarkEventT *BenchmarkEvent::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3828   auto _o = new BenchmarkEventT();
3829   UnPackTo(_o, _resolver);
3830   return _o;
3831 }
3832 
UnPackTo(BenchmarkEventT * _o,const flatbuffers::resolver_function_t * _resolver)3833 inline void BenchmarkEvent::UnPackTo(BenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3834   (void)_o;
3835   (void)_resolver;
3836   { auto _e = tflite_settings(); if (_e) _o->tflite_settings = std::unique_ptr<tflite::TFLiteSettingsT>(_e->UnPack(_resolver)); }
3837   { auto _e = event_type(); _o->event_type = _e; }
3838   { auto _e = result(); if (_e) _o->result = std::unique_ptr<tflite::BenchmarkResultT>(_e->UnPack(_resolver)); }
3839   { auto _e = error(); if (_e) _o->error = std::unique_ptr<tflite::BenchmarkErrorT>(_e->UnPack(_resolver)); }
3840   { auto _e = boottime_us(); _o->boottime_us = _e; }
3841   { auto _e = wallclock_us(); _o->wallclock_us = _e; }
3842 }
3843 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkEventT * _o,const flatbuffers::rehasher_function_t * _rehasher)3844 inline flatbuffers::Offset<BenchmarkEvent> BenchmarkEvent::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3845   return CreateBenchmarkEvent(_fbb, _o, _rehasher);
3846 }
3847 
CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkEventT * _o,const flatbuffers::rehasher_function_t * _rehasher)3848 inline flatbuffers::Offset<BenchmarkEvent> CreateBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3849   (void)_rehasher;
3850   (void)_o;
3851   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkEventT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3852   auto _tflite_settings = _o->tflite_settings ? CreateTFLiteSettings(_fbb, _o->tflite_settings.get(), _rehasher) : 0;
3853   auto _event_type = _o->event_type;
3854   auto _result = _o->result ? CreateBenchmarkResult(_fbb, _o->result.get(), _rehasher) : 0;
3855   auto _error = _o->error ? CreateBenchmarkError(_fbb, _o->error.get(), _rehasher) : 0;
3856   auto _boottime_us = _o->boottime_us;
3857   auto _wallclock_us = _o->wallclock_us;
3858   return tflite::CreateBenchmarkEvent(
3859       _fbb,
3860       _tflite_settings,
3861       _event_type,
3862       _result,
3863       _error,
3864       _boottime_us,
3865       _wallclock_us);
3866 }
3867 
3868 
3869 inline bool operator==(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs) {
3870   return
3871       (lhs.number_of_source_events == rhs.number_of_source_events) &&
3872       ((lhs.min_latency_event == rhs.min_latency_event) || (lhs.min_latency_event && rhs.min_latency_event && *lhs.min_latency_event == *rhs.min_latency_event)) &&
3873       (lhs.min_inference_time_us == rhs.min_inference_time_us);
3874 }
3875 
3876 inline bool operator!=(const BestAccelerationDecisionT &lhs, const BestAccelerationDecisionT &rhs) {
3877     return !(lhs == rhs);
3878 }
3879 
3880 
UnPack(const flatbuffers::resolver_function_t * _resolver)3881 inline BestAccelerationDecisionT *BestAccelerationDecision::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3882   auto _o = new BestAccelerationDecisionT();
3883   UnPackTo(_o, _resolver);
3884   return _o;
3885 }
3886 
UnPackTo(BestAccelerationDecisionT * _o,const flatbuffers::resolver_function_t * _resolver)3887 inline void BestAccelerationDecision::UnPackTo(BestAccelerationDecisionT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3888   (void)_o;
3889   (void)_resolver;
3890   { auto _e = number_of_source_events(); _o->number_of_source_events = _e; }
3891   { auto _e = min_latency_event(); if (_e) _o->min_latency_event = std::unique_ptr<tflite::BenchmarkEventT>(_e->UnPack(_resolver)); }
3892   { auto _e = min_inference_time_us(); _o->min_inference_time_us = _e; }
3893 }
3894 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BestAccelerationDecisionT * _o,const flatbuffers::rehasher_function_t * _rehasher)3895 inline flatbuffers::Offset<BestAccelerationDecision> BestAccelerationDecision::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3896   return CreateBestAccelerationDecision(_fbb, _o, _rehasher);
3897 }
3898 
CreateBestAccelerationDecision(flatbuffers::FlatBufferBuilder & _fbb,const BestAccelerationDecisionT * _o,const flatbuffers::rehasher_function_t * _rehasher)3899 inline flatbuffers::Offset<BestAccelerationDecision> CreateBestAccelerationDecision(flatbuffers::FlatBufferBuilder &_fbb, const BestAccelerationDecisionT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3900   (void)_rehasher;
3901   (void)_o;
3902   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BestAccelerationDecisionT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3903   auto _number_of_source_events = _o->number_of_source_events;
3904   auto _min_latency_event = _o->min_latency_event ? CreateBenchmarkEvent(_fbb, _o->min_latency_event.get(), _rehasher) : 0;
3905   auto _min_inference_time_us = _o->min_inference_time_us;
3906   return tflite::CreateBestAccelerationDecision(
3907       _fbb,
3908       _number_of_source_events,
3909       _min_latency_event,
3910       _min_inference_time_us);
3911 }
3912 
3913 
3914 inline bool operator==(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs) {
3915   return
3916       (lhs.initialization_status == rhs.initialization_status);
3917 }
3918 
3919 inline bool operator!=(const BenchmarkInitializationFailureT &lhs, const BenchmarkInitializationFailureT &rhs) {
3920     return !(lhs == rhs);
3921 }
3922 
3923 
UnPack(const flatbuffers::resolver_function_t * _resolver)3924 inline BenchmarkInitializationFailureT *BenchmarkInitializationFailure::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3925   auto _o = new BenchmarkInitializationFailureT();
3926   UnPackTo(_o, _resolver);
3927   return _o;
3928 }
3929 
UnPackTo(BenchmarkInitializationFailureT * _o,const flatbuffers::resolver_function_t * _resolver)3930 inline void BenchmarkInitializationFailure::UnPackTo(BenchmarkInitializationFailureT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3931   (void)_o;
3932   (void)_resolver;
3933   { auto _e = initialization_status(); _o->initialization_status = _e; }
3934 }
3935 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkInitializationFailureT * _o,const flatbuffers::rehasher_function_t * _rehasher)3936 inline flatbuffers::Offset<BenchmarkInitializationFailure> BenchmarkInitializationFailure::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3937   return CreateBenchmarkInitializationFailure(_fbb, _o, _rehasher);
3938 }
3939 
CreateBenchmarkInitializationFailure(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkInitializationFailureT * _o,const flatbuffers::rehasher_function_t * _rehasher)3940 inline flatbuffers::Offset<BenchmarkInitializationFailure> CreateBenchmarkInitializationFailure(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkInitializationFailureT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3941   (void)_rehasher;
3942   (void)_o;
3943   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkInitializationFailureT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3944   auto _initialization_status = _o->initialization_status;
3945   return tflite::CreateBenchmarkInitializationFailure(
3946       _fbb,
3947       _initialization_status);
3948 }
3949 
3950 
3951 inline bool operator==(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs) {
3952   return
3953       (lhs.is_log_flushing_event == rhs.is_log_flushing_event) &&
3954       ((lhs.best_acceleration_decision == rhs.best_acceleration_decision) || (lhs.best_acceleration_decision && rhs.best_acceleration_decision && *lhs.best_acceleration_decision == *rhs.best_acceleration_decision)) &&
3955       ((lhs.initialization_failure == rhs.initialization_failure) || (lhs.initialization_failure && rhs.initialization_failure && *lhs.initialization_failure == *rhs.initialization_failure)) &&
3956       ((lhs.benchmark_event == rhs.benchmark_event) || (lhs.benchmark_event && rhs.benchmark_event && *lhs.benchmark_event == *rhs.benchmark_event));
3957 }
3958 
3959 inline bool operator!=(const MiniBenchmarkEventT &lhs, const MiniBenchmarkEventT &rhs) {
3960     return !(lhs == rhs);
3961 }
3962 
3963 
UnPack(const flatbuffers::resolver_function_t * _resolver)3964 inline MiniBenchmarkEventT *MiniBenchmarkEvent::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
3965   auto _o = new MiniBenchmarkEventT();
3966   UnPackTo(_o, _resolver);
3967   return _o;
3968 }
3969 
UnPackTo(MiniBenchmarkEventT * _o,const flatbuffers::resolver_function_t * _resolver)3970 inline void MiniBenchmarkEvent::UnPackTo(MiniBenchmarkEventT *_o, const flatbuffers::resolver_function_t *_resolver) const {
3971   (void)_o;
3972   (void)_resolver;
3973   { auto _e = is_log_flushing_event(); _o->is_log_flushing_event = _e; }
3974   { auto _e = best_acceleration_decision(); if (_e) _o->best_acceleration_decision = std::unique_ptr<tflite::BestAccelerationDecisionT>(_e->UnPack(_resolver)); }
3975   { auto _e = initialization_failure(); if (_e) _o->initialization_failure = std::unique_ptr<tflite::BenchmarkInitializationFailureT>(_e->UnPack(_resolver)); }
3976   { auto _e = benchmark_event(); if (_e) _o->benchmark_event = std::unique_ptr<tflite::BenchmarkEventT>(_e->UnPack(_resolver)); }
3977 }
3978 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const MiniBenchmarkEventT * _o,const flatbuffers::rehasher_function_t * _rehasher)3979 inline flatbuffers::Offset<MiniBenchmarkEvent> MiniBenchmarkEvent::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
3980   return CreateMiniBenchmarkEvent(_fbb, _o, _rehasher);
3981 }
3982 
CreateMiniBenchmarkEvent(flatbuffers::FlatBufferBuilder & _fbb,const MiniBenchmarkEventT * _o,const flatbuffers::rehasher_function_t * _rehasher)3983 inline flatbuffers::Offset<MiniBenchmarkEvent> CreateMiniBenchmarkEvent(flatbuffers::FlatBufferBuilder &_fbb, const MiniBenchmarkEventT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
3984   (void)_rehasher;
3985   (void)_o;
3986   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MiniBenchmarkEventT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
3987   auto _is_log_flushing_event = _o->is_log_flushing_event;
3988   auto _best_acceleration_decision = _o->best_acceleration_decision ? CreateBestAccelerationDecision(_fbb, _o->best_acceleration_decision.get(), _rehasher) : 0;
3989   auto _initialization_failure = _o->initialization_failure ? CreateBenchmarkInitializationFailure(_fbb, _o->initialization_failure.get(), _rehasher) : 0;
3990   auto _benchmark_event = _o->benchmark_event ? CreateBenchmarkEvent(_fbb, _o->benchmark_event.get(), _rehasher) : 0;
3991   return tflite::CreateMiniBenchmarkEvent(
3992       _fbb,
3993       _is_log_flushing_event,
3994       _best_acceleration_decision,
3995       _initialization_failure,
3996       _benchmark_event);
3997 }
3998 
3999 
4000 inline bool operator==(const ModelFileT &lhs, const ModelFileT &rhs) {
4001   return
4002       (lhs.filename == rhs.filename) &&
4003       (lhs.fd == rhs.fd) &&
4004       (lhs.offset == rhs.offset) &&
4005       (lhs.length == rhs.length);
4006 }
4007 
4008 inline bool operator!=(const ModelFileT &lhs, const ModelFileT &rhs) {
4009     return !(lhs == rhs);
4010 }
4011 
4012 
UnPack(const flatbuffers::resolver_function_t * _resolver)4013 inline ModelFileT *ModelFile::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
4014   auto _o = new ModelFileT();
4015   UnPackTo(_o, _resolver);
4016   return _o;
4017 }
4018 
UnPackTo(ModelFileT * _o,const flatbuffers::resolver_function_t * _resolver)4019 inline void ModelFile::UnPackTo(ModelFileT *_o, const flatbuffers::resolver_function_t *_resolver) const {
4020   (void)_o;
4021   (void)_resolver;
4022   { auto _e = filename(); if (_e) _o->filename = _e->str(); }
4023   { auto _e = fd(); _o->fd = _e; }
4024   { auto _e = offset(); _o->offset = _e; }
4025   { auto _e = length(); _o->length = _e; }
4026 }
4027 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const ModelFileT * _o,const flatbuffers::rehasher_function_t * _rehasher)4028 inline flatbuffers::Offset<ModelFile> ModelFile::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
4029   return CreateModelFile(_fbb, _o, _rehasher);
4030 }
4031 
CreateModelFile(flatbuffers::FlatBufferBuilder & _fbb,const ModelFileT * _o,const flatbuffers::rehasher_function_t * _rehasher)4032 inline flatbuffers::Offset<ModelFile> CreateModelFile(flatbuffers::FlatBufferBuilder &_fbb, const ModelFileT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
4033   (void)_rehasher;
4034   (void)_o;
4035   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ModelFileT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
4036   auto _filename = _o->filename.empty() ? 0 : _fbb.CreateString(_o->filename);
4037   auto _fd = _o->fd;
4038   auto _offset = _o->offset;
4039   auto _length = _o->length;
4040   return tflite::CreateModelFile(
4041       _fbb,
4042       _filename,
4043       _fd,
4044       _offset,
4045       _length);
4046 }
4047 
4048 
4049 inline bool operator==(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs) {
4050   return
4051       (lhs.storage_file_path == rhs.storage_file_path) &&
4052       (lhs.data_directory_path == rhs.data_directory_path);
4053 }
4054 
4055 inline bool operator!=(const BenchmarkStoragePathsT &lhs, const BenchmarkStoragePathsT &rhs) {
4056     return !(lhs == rhs);
4057 }
4058 
4059 
UnPack(const flatbuffers::resolver_function_t * _resolver)4060 inline BenchmarkStoragePathsT *BenchmarkStoragePaths::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
4061   auto _o = new BenchmarkStoragePathsT();
4062   UnPackTo(_o, _resolver);
4063   return _o;
4064 }
4065 
UnPackTo(BenchmarkStoragePathsT * _o,const flatbuffers::resolver_function_t * _resolver)4066 inline void BenchmarkStoragePaths::UnPackTo(BenchmarkStoragePathsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
4067   (void)_o;
4068   (void)_resolver;
4069   { auto _e = storage_file_path(); if (_e) _o->storage_file_path = _e->str(); }
4070   { auto _e = data_directory_path(); if (_e) _o->data_directory_path = _e->str(); }
4071 }
4072 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkStoragePathsT * _o,const flatbuffers::rehasher_function_t * _rehasher)4073 inline flatbuffers::Offset<BenchmarkStoragePaths> BenchmarkStoragePaths::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
4074   return CreateBenchmarkStoragePaths(_fbb, _o, _rehasher);
4075 }
4076 
CreateBenchmarkStoragePaths(flatbuffers::FlatBufferBuilder & _fbb,const BenchmarkStoragePathsT * _o,const flatbuffers::rehasher_function_t * _rehasher)4077 inline flatbuffers::Offset<BenchmarkStoragePaths> CreateBenchmarkStoragePaths(flatbuffers::FlatBufferBuilder &_fbb, const BenchmarkStoragePathsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
4078   (void)_rehasher;
4079   (void)_o;
4080   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BenchmarkStoragePathsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
4081   auto _storage_file_path = _o->storage_file_path.empty() ? 0 : _fbb.CreateString(_o->storage_file_path);
4082   auto _data_directory_path = _o->data_directory_path.empty() ? 0 : _fbb.CreateString(_o->data_directory_path);
4083   return tflite::CreateBenchmarkStoragePaths(
4084       _fbb,
4085       _storage_file_path,
4086       _data_directory_path);
4087 }
4088 
4089 
4090 inline bool operator==(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs) {
4091   return
4092       (lhs.settings_to_test == rhs.settings_to_test) &&
4093       ((lhs.model_file == rhs.model_file) || (lhs.model_file && rhs.model_file && *lhs.model_file == *rhs.model_file)) &&
4094       ((lhs.storage_paths == rhs.storage_paths) || (lhs.storage_paths && rhs.storage_paths && *lhs.storage_paths == *rhs.storage_paths));
4095 }
4096 
4097 inline bool operator!=(const MinibenchmarkSettingsT &lhs, const MinibenchmarkSettingsT &rhs) {
4098     return !(lhs == rhs);
4099 }
4100 
4101 
UnPack(const flatbuffers::resolver_function_t * _resolver)4102 inline MinibenchmarkSettingsT *MinibenchmarkSettings::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
4103   auto _o = new MinibenchmarkSettingsT();
4104   UnPackTo(_o, _resolver);
4105   return _o;
4106 }
4107 
UnPackTo(MinibenchmarkSettingsT * _o,const flatbuffers::resolver_function_t * _resolver)4108 inline void MinibenchmarkSettings::UnPackTo(MinibenchmarkSettingsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
4109   (void)_o;
4110   (void)_resolver;
4111   { auto _e = settings_to_test(); if (_e) { _o->settings_to_test.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->settings_to_test[_i] = std::unique_ptr<tflite::TFLiteSettingsT>(_e->Get(_i)->UnPack(_resolver)); } } }
4112   { auto _e = model_file(); if (_e) _o->model_file = std::unique_ptr<tflite::ModelFileT>(_e->UnPack(_resolver)); }
4113   { auto _e = storage_paths(); if (_e) _o->storage_paths = std::unique_ptr<tflite::BenchmarkStoragePathsT>(_e->UnPack(_resolver)); }
4114 }
4115 
Pack(flatbuffers::FlatBufferBuilder & _fbb,const MinibenchmarkSettingsT * _o,const flatbuffers::rehasher_function_t * _rehasher)4116 inline flatbuffers::Offset<MinibenchmarkSettings> MinibenchmarkSettings::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
4117   return CreateMinibenchmarkSettings(_fbb, _o, _rehasher);
4118 }
4119 
CreateMinibenchmarkSettings(flatbuffers::FlatBufferBuilder & _fbb,const MinibenchmarkSettingsT * _o,const flatbuffers::rehasher_function_t * _rehasher)4120 inline flatbuffers::Offset<MinibenchmarkSettings> CreateMinibenchmarkSettings(flatbuffers::FlatBufferBuilder &_fbb, const MinibenchmarkSettingsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
4121   (void)_rehasher;
4122   (void)_o;
4123   struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MinibenchmarkSettingsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
4124   auto _settings_to_test = _o->settings_to_test.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::TFLiteSettings>> (_o->settings_to_test.size(), [](size_t i, _VectorArgs *__va) { return CreateTFLiteSettings(*__va->__fbb, __va->__o->settings_to_test[i].get(), __va->__rehasher); }, &_va ) : 0;
4125   auto _model_file = _o->model_file ? CreateModelFile(_fbb, _o->model_file.get(), _rehasher) : 0;
4126   auto _storage_paths = _o->storage_paths ? CreateBenchmarkStoragePaths(_fbb, _o->storage_paths.get(), _rehasher) : 0;
4127   return tflite::CreateMinibenchmarkSettings(
4128       _fbb,
4129       _settings_to_test,
4130       _model_file,
4131       _storage_paths);
4132 }
4133 
4134 }  // namespace tflite
4135 
4136 #endif  // FLATBUFFERS_GENERATED_CONFIGURATIONFORGENERATION_TFLITE_H_
4137