1 /*
2  * Copyright (C) 2017 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 // Contains all the entry points to the C Neural Networks API.
18 // We do basic validation of the operands and then call the class
19 // that implements the functionality.
20 
21 #define LOG_TAG "NeuralNetworks"
22 
23 #include "NeuralNetworks.h"
24 
25 #include <ControlFlow.h>
26 #include <LegacyUtils.h>
27 #include <MetaModel.h>
28 #include <Tracing.h>
29 #include <nnapi/Types.h>
30 
31 #include <algorithm>
32 #include <cstddef>
33 #include <memory>
34 #include <utility>
35 #include <vector>
36 
37 #include "BurstBuilder.h"
38 #include "CompilationBuilder.h"
39 #include "Event.h"
40 #include "ExecutionBuilder.h"
41 #include "ExecutionCallback.h"
42 #include "FeatureLevel.h"
43 #include "Manager.h"
44 #include "Memory.h"
45 #include "ModelBuilder.h"
46 #include "NeuralNetworksExtensions.h"
47 #include "NeuralNetworksOEM.h"
48 
49 #ifdef NN_COMPATIBILITY_LIBRARY_BUILD
50 #include "NeuralNetworksSupportLibraryImpl.h"
51 #endif  // NN_COMPATIBILITY_LIBRARY_BUILD
52 
53 using namespace android::nn;
54 
55 // Make sure the constants defined in the header files have not changed values.
56 // IMPORTANT: When adding new values, update kNumberOfDataTypes or kNumberOfDataTypesOEM
57 // in Utils.h.
58 static_assert(ANEURALNETWORKS_FLOAT32 == 0, "ANEURALNETWORKS_FLOAT32 has changed");
59 static_assert(ANEURALNETWORKS_INT32 == 1, "ANEURALNETWORKS_INT32 has changed");
60 static_assert(ANEURALNETWORKS_UINT32 == 2, "ANEURALNETWORKS_UINT32 has changed");
61 static_assert(ANEURALNETWORKS_TENSOR_FLOAT32 == 3, "ANEURALNETWORKS_TENSOR_FLOAT32 has changed");
62 static_assert(ANEURALNETWORKS_TENSOR_INT32 == 4, "ANEURALNETWORKS_TENSOR_INT32 has changed");
63 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_ASYMM == 5,
64               "ANEURALNETWORKS_TENSOR_QUANT8_ASYMM has changed");
65 static_assert(ANEURALNETWORKS_BOOL == 6, "ANEURALNETWORKS_BOOL has changed");
66 static_assert(ANEURALNETWORKS_TENSOR_QUANT16_SYMM == 7,
67               "ANEURALNETWORKS_TENSOR_QUANT16_SYMM has changed");
68 static_assert(ANEURALNETWORKS_TENSOR_FLOAT16 == 8, "ANEURALNETWORKS_TENSOR_FLOAT16 has changed");
69 static_assert(ANEURALNETWORKS_TENSOR_BOOL8 == 9, "ANEURALNETWORKS_TENSOR_BOOL8 has changed");
70 static_assert(ANEURALNETWORKS_FLOAT16 == 10, "ANEURALNETWORKS_FLOAT16 has changed");
71 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL == 11,
72               "ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL has changed");
73 static_assert(ANEURALNETWORKS_TENSOR_QUANT16_ASYMM == 12,
74               "ANEURALNETWORKS_TENSOR_QUANT16_ASYMM has changed");
75 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_SYMM == 13,
76               "ANEURALNETWORKS_TENSOR_QUANT8_SYMM has changed");
77 static_assert(ANEURALNETWORKS_OEM_SCALAR == 10000, "ANEURALNETWORKS_OEM_SCALAR has changed");
78 static_assert(ANEURALNETWORKS_TENSOR_OEM_BYTE == 10001,
79               "ANEURALNETWORKS_TENSOR_OEM_BYTE has changed");
80 
81 // IMPORTANT: When adding new values, update kNumberOfOperationTypes or
82 // kNumberOfOperationTypesOEM in Utils.h.
83 static_assert(ANEURALNETWORKS_ADD == 0, "ANEURALNETWORKS_ADD has changed");
84 static_assert(ANEURALNETWORKS_AVERAGE_POOL_2D == 1, "ANEURALNETWORKS_AVERAGE_POOL_2D has changed");
85 static_assert(ANEURALNETWORKS_CONCATENATION == 2, "ANEURALNETWORKS_CONCATENATION has changed");
86 static_assert(ANEURALNETWORKS_CONV_2D == 3, "ANEURALNETWORKS_CONV_2D has changed");
87 static_assert(ANEURALNETWORKS_DEPTHWISE_CONV_2D == 4,
88               "ANEURALNETWORKS_DEPTHWISE_CONV_2D has changed");
89 static_assert(ANEURALNETWORKS_DEPTH_TO_SPACE == 5, "ANEURALNETWORKS_DEPTH_TO_SPACE has changed");
90 static_assert(ANEURALNETWORKS_DEQUANTIZE == 6, "ANEURALNETWORKS_DEQUANTIZE has changed");
91 static_assert(ANEURALNETWORKS_EMBEDDING_LOOKUP == 7,
92               "ANEURALNETWORKS_EMBEDDING_LOOKUP has changed");
93 static_assert(ANEURALNETWORKS_FLOOR == 8, "ANEURALNETWORKS_FLOOR has changed");
94 static_assert(ANEURALNETWORKS_FULLY_CONNECTED == 9, "ANEURALNETWORKS_FULLY_CONNECTED has changed");
95 static_assert(ANEURALNETWORKS_HASHTABLE_LOOKUP == 10,
96               "ANEURALNETWORKS_HASHTABLE_LOOKUP has changed");
97 static_assert(ANEURALNETWORKS_L2_NORMALIZATION == 11,
98               "ANEURALNETWORKS_L2_NORMALIZATION has changed");
99 static_assert(ANEURALNETWORKS_L2_POOL_2D == 12, "ANEURALNETWORKS_L2_POOL_2D has changed");
100 static_assert(ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION == 13,
101               "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION has changed");
102 static_assert(ANEURALNETWORKS_LOGISTIC == 14, "ANEURALNETWORKS_LOGISTIC has changed");
103 static_assert(ANEURALNETWORKS_LSH_PROJECTION == 15, "ANEURALNETWORKS_LSH_PROJECTION has changed");
104 static_assert(ANEURALNETWORKS_LSTM == 16, "ANEURALNETWORKS_LSTM has changed");
105 static_assert(ANEURALNETWORKS_MAX_POOL_2D == 17, "ANEURALNETWORKS_MAX_POOL_2D has changed");
106 static_assert(ANEURALNETWORKS_MUL == 18, "ANEURALNETWORKS_MUL has changed");
107 static_assert(ANEURALNETWORKS_RELU == 19, "ANEURALNETWORKS_RELU has changed");
108 static_assert(ANEURALNETWORKS_RELU1 == 20, "ANEURALNETWORKS_RELU1 has changed");
109 static_assert(ANEURALNETWORKS_RELU6 == 21, "ANEURALNETWORKS_RELU6 has changed");
110 static_assert(ANEURALNETWORKS_RESHAPE == 22, "ANEURALNETWORKS_RESHAPE has changed");
111 static_assert(ANEURALNETWORKS_RESIZE_BILINEAR == 23, "ANEURALNETWORKS_RESIZE_BILINEAR has changed");
112 static_assert(ANEURALNETWORKS_RNN == 24, "ANEURALNETWORKS_RNN has changed");
113 static_assert(ANEURALNETWORKS_SOFTMAX == 25, "ANEURALNETWORKS_SOFTMAX has changed");
114 static_assert(ANEURALNETWORKS_SPACE_TO_DEPTH == 26, "ANEURALNETWORKS_SPACE_TO_DEPTH has changed");
115 static_assert(ANEURALNETWORKS_SVDF == 27, "ANEURALNETWORKS_SVDF has changed");
116 static_assert(ANEURALNETWORKS_TANH == 28, "ANEURALNETWORKS_TANH has changed");
117 
118 static_assert(ANEURALNETWORKS_BATCH_TO_SPACE_ND == 29,
119               "ANEURALNETWORKS_BATCH_TO_SPACE_ND has changed");
120 static_assert(ANEURALNETWORKS_DIV == 30, "ANEURALNETWORKS_DIV has changed");
121 static_assert(ANEURALNETWORKS_MEAN == 31, "ANEURALNETWORKS_MEAN has changed");
122 static_assert(ANEURALNETWORKS_PAD == 32, "ANEURALNETWORKS_PAD has changed");
123 static_assert(ANEURALNETWORKS_SPACE_TO_BATCH_ND == 33,
124               "ANEURALNETWORKS_SPACE_TO_BATCH_ND has changed");
125 static_assert(ANEURALNETWORKS_SQUEEZE == 34, "ANEURALNETWORKS_SQUEEZE has changed");
126 static_assert(ANEURALNETWORKS_STRIDED_SLICE == 35, "ANEURALNETWORKS_STRIDED_SLICE has changed");
127 static_assert(ANEURALNETWORKS_SUB == 36, "ANEURALNETWORKS_SUB has changed");
128 static_assert(ANEURALNETWORKS_TRANSPOSE == 37, "ANEURALNETWORKS_TRANSPOSE has changed");
129 
130 static_assert(ANEURALNETWORKS_ABS == 38, "ANEURALNETWORKS_ABS has changed");
131 static_assert(ANEURALNETWORKS_ARGMAX == 39, "ANEURALNETWORKS_ARGMAX has changed");
132 static_assert(ANEURALNETWORKS_ARGMIN == 40, "ANEURALNETWORKS_ARGMIN has changed");
133 static_assert(ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM == 41,
134               "ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM has changed");
135 static_assert(ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM == 42,
136               "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM has changed");
137 static_assert(ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN == 43,
138               "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN has changed");
139 static_assert(ANEURALNETWORKS_BOX_WITH_NMS_LIMIT == 44,
140               "ANEURALNETWORKS_BOX_WITH_NMS_LIMIT has changed");
141 static_assert(ANEURALNETWORKS_CAST == 45, "ANEURALNETWORKS_CAST has changed");
142 static_assert(ANEURALNETWORKS_CHANNEL_SHUFFLE == 46, "ANEURALNETWORKS_CHANNEL_SHUFFLE has changed");
143 static_assert(ANEURALNETWORKS_DETECTION_POSTPROCESSING == 47,
144               "ANEURALNETWORKS_DETECTION_POSTPROCESSING has changed");
145 static_assert(ANEURALNETWORKS_EQUAL == 48, "ANEURALNETWORKS_EQUAL has changed");
146 static_assert(ANEURALNETWORKS_EXP == 49, "ANEURALNETWORKS_EXP has changed");
147 static_assert(ANEURALNETWORKS_EXPAND_DIMS == 50, "ANEURALNETWORKS_EXPAND_DIMS has changed");
148 static_assert(ANEURALNETWORKS_GATHER == 51, "ANEURALNETWORKS_GATHER has changed");
149 static_assert(ANEURALNETWORKS_GENERATE_PROPOSALS == 52,
150               "ANEURALNETWORKS_GENERATE_PROPOSALS has changed");
151 static_assert(ANEURALNETWORKS_GREATER == 53, "ANEURALNETWORKS_GREATER has changed");
152 static_assert(ANEURALNETWORKS_GREATER_EQUAL == 54, "ANEURALNETWORKS_GREATER_EQUAL has changed");
153 static_assert(ANEURALNETWORKS_GROUPED_CONV_2D == 55, "ANEURALNETWORKS_GROUPED_CONV_2D has changed");
154 static_assert(ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT == 56,
155               "ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT has changed");
156 static_assert(ANEURALNETWORKS_INSTANCE_NORMALIZATION == 57,
157               "ANEURALNETWORKS_INSTANCE_NORMALIZATION has changed");
158 static_assert(ANEURALNETWORKS_LESS == 58, "ANEURALNETWORKS_LESS has changed");
159 static_assert(ANEURALNETWORKS_LESS_EQUAL == 59, "ANEURALNETWORKS_LESS_EQUAL has changed");
160 static_assert(ANEURALNETWORKS_LOG == 60, "ANEURALNETWORKS_LOG has changed");
161 static_assert(ANEURALNETWORKS_LOGICAL_AND == 61, "ANEURALNETWORKS_LOGICAL_AND has changed");
162 static_assert(ANEURALNETWORKS_LOGICAL_NOT == 62, "ANEURALNETWORKS_LOGICAL_NOT has changed");
163 static_assert(ANEURALNETWORKS_LOGICAL_OR == 63, "ANEURALNETWORKS_LOGICAL_OR has changed");
164 static_assert(ANEURALNETWORKS_LOG_SOFTMAX == 64, "ANEURALNETWORKS_LOG_SOFTMAX has changed");
165 static_assert(ANEURALNETWORKS_MAXIMUM == 65, "ANEURALNETWORKS_MAXIMUM has changed");
166 static_assert(ANEURALNETWORKS_MINIMUM == 66, "ANEURALNETWORKS_MINIMUM has changed");
167 static_assert(ANEURALNETWORKS_NEG == 67, "ANEURALNETWORKS_NEG has changed");
168 static_assert(ANEURALNETWORKS_NOT_EQUAL == 68, "ANEURALNETWORKS_NOT_EQUAL has changed");
169 static_assert(ANEURALNETWORKS_PAD_V2 == 69, "ANEURALNETWORKS_PAD_V2 has changed");
170 static_assert(ANEURALNETWORKS_POW == 70, "ANEURALNETWORKS_POW has changed");
171 static_assert(ANEURALNETWORKS_PRELU == 71, "ANEURALNETWORKS_PRELU has changed");
172 static_assert(ANEURALNETWORKS_QUANTIZE == 72, "ANEURALNETWORKS_QUANTIZE has changed");
173 static_assert(ANEURALNETWORKS_QUANTIZED_16BIT_LSTM == 73,
174               "ANEURALNETWORKS_QUANTIZED_16BIT_LSTM has changed");
175 static_assert(ANEURALNETWORKS_RANDOM_MULTINOMIAL == 74,
176               "ANEURALNETWORKS_RANDOM_MULTINOMIAL has changed");
177 static_assert(ANEURALNETWORKS_REDUCE_ALL == 75, "ANEURALNETWORKS_REDUCE_ALL has changed");
178 static_assert(ANEURALNETWORKS_REDUCE_ANY == 76, "ANEURALNETWORKS_REDUCE_ANY has changed");
179 static_assert(ANEURALNETWORKS_REDUCE_MAX == 77, "ANEURALNETWORKS_REDUCE_MAX has changed");
180 static_assert(ANEURALNETWORKS_REDUCE_MIN == 78, "ANEURALNETWORKS_REDUCE_MIN has changed");
181 static_assert(ANEURALNETWORKS_REDUCE_PROD == 79, "ANEURALNETWORKS_REDUCE_PROD has changed");
182 static_assert(ANEURALNETWORKS_REDUCE_SUM == 80, "ANEURALNETWORKS_REDUCE_SUM has changed");
183 static_assert(ANEURALNETWORKS_ROI_ALIGN == 81, "ANEURALNETWORKS_ROI_ALIGN has changed");
184 static_assert(ANEURALNETWORKS_ROI_POOLING == 82, "ANEURALNETWORKS_ROI_POOLING has changed");
185 static_assert(ANEURALNETWORKS_RSQRT == 83, "ANEURALNETWORKS_RSQRT has changed");
186 static_assert(ANEURALNETWORKS_SELECT == 84, "ANEURALNETWORKS_SELECT has changed");
187 static_assert(ANEURALNETWORKS_SIN == 85, "ANEURALNETWORKS_SIN has changed");
188 static_assert(ANEURALNETWORKS_SLICE == 86, "ANEURALNETWORKS_SLICE has changed");
189 static_assert(ANEURALNETWORKS_SPLIT == 87, "ANEURALNETWORKS_SPLIT has changed");
190 static_assert(ANEURALNETWORKS_SQRT == 88, "ANEURALNETWORKS_SQRT has changed");
191 static_assert(ANEURALNETWORKS_TILE == 89, "ANEURALNETWORKS_TILE has changed");
192 static_assert(ANEURALNETWORKS_TOPK_V2 == 90, "ANEURALNETWORKS_TOPK_V2 has changed");
193 static_assert(ANEURALNETWORKS_TRANSPOSE_CONV_2D == 91,
194               "ANEURALNETWORKS_TRANSPOSE_CONV_2D has changed");
195 static_assert(ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM == 92,
196               "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM has changed");
197 static_assert(ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN == 93,
198               "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN has changed");
199 static_assert(ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR == 94,
200               "ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR has changed");
201 static_assert(ANEURALNETWORKS_QUANTIZED_LSTM == 95, "ANEURALNETWORKS_QUANTIZED_LSTM has changed");
202 static_assert(ANEURALNETWORKS_IF == 96, "ANEURALNETWORKS_IF has changed");
203 static_assert(ANEURALNETWORKS_WHILE == 97, "ANEURALNETWORKS_WHILE has changed");
204 static_assert(ANEURALNETWORKS_ELU == 98, "ANEURALNETWORKS_ELU has changed");
205 static_assert(ANEURALNETWORKS_HARD_SWISH == 99, "ANEURALNETWORKS_HARD_SWISH has changed");
206 static_assert(ANEURALNETWORKS_FILL == 100, "ANEURALNETWORKS_FILL has changed");
207 static_assert(ANEURALNETWORKS_RANK == 101, "ANEURALNETWORKS_RANK has changed");
208 
209 static_assert(ANEURALNETWORKS_OEM_OPERATION == 10000, "ANEURALNETWORKS_OEM_OPERATION has changed");
210 
211 static_assert(ANEURALNETWORKS_FUSED_NONE == 0, "ANEURALNETWORKS_FUSED_NONE has changed");
212 static_assert(ANEURALNETWORKS_FUSED_RELU == 1, "ANEURALNETWORKS_FUSED_RELU has changed");
213 static_assert(ANEURALNETWORKS_FUSED_RELU1 == 2, "ANEURALNETWORKS_FUSED_RELU1 has changed");
214 static_assert(ANEURALNETWORKS_FUSED_RELU6 == 3, "ANEURALNETWORKS_FUSED_RELU6 has changed");
215 
216 static_assert(ANEURALNETWORKS_PREFER_LOW_POWER == 0,
217               "ANEURALNETWORKS_PREFER_LOW_POWER has changed");
218 static_assert(ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER == 1,
219               "ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER has changed");
220 static_assert(ANEURALNETWORKS_PREFER_SUSTAINED_SPEED == 2,
221               "ANEURALNETWORKS_PREFER_SUSTAINED_SPEED has changed");
222 
223 static_assert(ANEURALNETWORKS_NO_ERROR == 0, "ANEURALNETWORKS_NO_ERROR has changed");
224 static_assert(ANEURALNETWORKS_OUT_OF_MEMORY == 1, "ANEURALNETWORKS_OUT_OF_MEMORY has changed");
225 static_assert(ANEURALNETWORKS_INCOMPLETE == 2, "ANEURALNETWORKS_INCOMPLETE has changed");
226 static_assert(ANEURALNETWORKS_UNEXPECTED_NULL == 3, "ANEURALNETWORKS_UNEXPECTED_NULL has changed");
227 static_assert(ANEURALNETWORKS_BAD_DATA == 4, "ANEURALNETWORKS_BAD_DATA has changed");
228 static_assert(ANEURALNETWORKS_OP_FAILED == 5, "ANEURALNETWORKS_OP_FAILED has changed");
229 static_assert(ANEURALNETWORKS_BAD_STATE == 6, "ANEURALNETWORKS_BAD_STATE has changed");
230 static_assert(ANEURALNETWORKS_UNMAPPABLE == 7, "ANEURALNETWORKS_UNMAPPABLE has changed");
231 static_assert(ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE == 8,
232               "ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE has changed");
233 static_assert(ANEURALNETWORKS_UNAVAILABLE_DEVICE == 9,
234               "ANEURALNETWORKS_UNAVAILABLE_DEVICE has changed");
235 static_assert(ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT == 10,
236               "ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT has changed");
237 static_assert(ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT == 11,
238               "ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT has changed");
239 static_assert(ANEURALNETWORKS_RESOURCE_EXHAUSTED_TRANSIENT == 12,
240               "ANEURALNETWORKS_RESOURCE_EXHAUSTED_TRANSIENT has changed");
241 static_assert(ANEURALNETWORKS_RESOURCE_EXHAUSTED_PERSISTENT == 13,
242               "ANEURALNETWORKS_RESOURCE_EXHAUSTED_PERSISTENT has changed");
243 static_assert(ANEURALNETWORKS_DEAD_OBJECT == 14, "ANEURALNETWORKS_DEAD_OBJECT has changed");
244 
245 static_assert(ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES == 128,
246               "ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES has changed");
247 
248 static_assert(ANEURALNETWORKS_DEVICE_UNKNOWN == 0, "ANEURALNETWORKS_DEVICE_UNKNOWN has changed");
249 static_assert(ANEURALNETWORKS_DEVICE_OTHER == 1, "ANEURALNETWORKS_DEVICE_OTHER has changed");
250 static_assert(ANEURALNETWORKS_DEVICE_CPU == 2, "ANEURALNETWORKS_DEVICE_CPU has changed");
251 static_assert(ANEURALNETWORKS_DEVICE_GPU == 3, "ANEURALNETWORKS_DEVICE_GPU has changed");
252 static_assert(ANEURALNETWORKS_DEVICE_ACCELERATOR == 4,
253               "ANEURALNETWORKS_DEVICE_ACCELERATOR has changed");
254 
255 static_assert(ANEURALNETWORKS_DURATION_ON_HARDWARE == 0,
256               "ANEURALNETWORKS_DURATION_ON_HARDWARE has changed");
257 static_assert(ANEURALNETWORKS_DURATION_IN_DRIVER == 1,
258               "ANEURALNETWORKS_DURATION_IN_DRIVER has changed");
259 static_assert(ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE == 2,
260               "ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE has changed");
261 static_assert(ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER == 3,
262               "ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER has changed");
263 
264 // Make sure that the constants are compatible with the values defined in
265 // hardware/interfaces/neuralnetworks/1.0/types.hal.
266 static_assert(static_cast<int32_t>(OperandType::OEM) == ANEURALNETWORKS_OEM_SCALAR,
267               "OEM != ANEURALNETWORKS_OEM_SCALAR");
268 static_assert(static_cast<int32_t>(OperandType::FLOAT32) == ANEURALNETWORKS_FLOAT32,
269               "FLOAT32 != ANEURALNETWORKS_FLOAT32");
270 static_assert(static_cast<int32_t>(OperandType::INT32) == ANEURALNETWORKS_INT32,
271               "INT32 != ANEURALNETWORKS_INT32");
272 static_assert(static_cast<int32_t>(OperandType::UINT32) == ANEURALNETWORKS_UINT32,
273               "UINT32 != ANEURALNETWORKS_UINT32");
274 static_assert(static_cast<int32_t>(OperandType::TENSOR_OEM_BYTE) == ANEURALNETWORKS_TENSOR_OEM_BYTE,
275               "TENSOR_OEM_BYTE != ANEURALNETWORKS_TENSOR_OEM_BYTE");
276 static_assert(static_cast<int32_t>(OperandType::TENSOR_FLOAT32) == ANEURALNETWORKS_TENSOR_FLOAT32,
277               "TENSOR_FLOAT32 != ANEURALNETWORKS_TENSOR_FLOAT32");
278 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_ASYMM) ==
279                       ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
280               "TENSOR_QUANT8_ASYMM != ANEURALNETWORKS_TENSOR_QUANT8_ASYMM");
281 
282 static_assert(static_cast<int32_t>(OperationType::ADD) == ANEURALNETWORKS_ADD,
283               "OperationType::ADD != ANEURALNETWORKS_ADD");
284 static_assert(static_cast<int32_t>(OperationType::AVERAGE_POOL_2D) ==
285                       ANEURALNETWORKS_AVERAGE_POOL_2D,
286               "OperationType::AVERAGE_POOL_2D != ANEURALNETWORKS_AVERAGE_POOL_2D");
287 static_assert(static_cast<int32_t>(OperationType::CONV_2D) == ANEURALNETWORKS_CONV_2D,
288               "OperationType::CONV_2D != ANEURALNETWORKS_CONV_2D");
289 static_assert(static_cast<int32_t>(OperationType::DEPTHWISE_CONV_2D) ==
290                       ANEURALNETWORKS_DEPTHWISE_CONV_2D,
291               "OperationType::DEPTHWISE_CONV_2D != ANEURALNETWORKS_DEPTHWISE_CONV_2D");
292 static_assert(static_cast<int32_t>(OperationType::DEPTH_TO_SPACE) == ANEURALNETWORKS_DEPTH_TO_SPACE,
293               "OperationType::DEPTH_TO_SPACE != ANEURALNETWORKS_DEPTH_TO_SPACE");
294 static_assert(static_cast<int32_t>(OperationType::DEQUANTIZE) == ANEURALNETWORKS_DEQUANTIZE,
295               "OperationType::DEQUANTIZE != ANEURALNETWORKS_DEQUANTIZE");
296 static_assert(static_cast<int32_t>(OperationType::EMBEDDING_LOOKUP) ==
297                       ANEURALNETWORKS_EMBEDDING_LOOKUP,
298               "OperationType::EMBEDDING_LOOKUP != ANEURALNETWORKS_EMBEDDING_LOOKUP");
299 static_assert(static_cast<int32_t>(OperationType::FLOOR) == ANEURALNETWORKS_FLOOR,
300               "OperationType::FLOOR != ANEURALNETWORKS_FLOOR");
301 static_assert(static_cast<int32_t>(OperationType::FULLY_CONNECTED) ==
302                       ANEURALNETWORKS_FULLY_CONNECTED,
303               "OperationType::FULLY_CONNECTED != ANEURALNETWORKS_FULLY_CONNECTED");
304 static_assert(static_cast<int32_t>(OperationType::HASHTABLE_LOOKUP) ==
305                       ANEURALNETWORKS_HASHTABLE_LOOKUP,
306               "OperationType::HASHTABLE_LOOKUP != ANEURALNETWORKS_HASHTABLE_LOOKUP");
307 static_assert(static_cast<int32_t>(OperationType::L2_NORMALIZATION) ==
308                       ANEURALNETWORKS_L2_NORMALIZATION,
309               "OperationType::L2_NORMALIZATION != ANEURALNETWORKS_L2_NORMALIZATION");
310 static_assert(static_cast<int32_t>(OperationType::L2_POOL_2D) == ANEURALNETWORKS_L2_POOL_2D,
311               "OperationType::L2_POOL_2D != ANEURALNETWORKS_L2_POOL_2D");
312 static_assert(static_cast<int32_t>(OperationType::LOCAL_RESPONSE_NORMALIZATION) ==
313                       ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION,
314               "OperationType::LOCAL_RESPONSE_NORMALIZATION != "
315               "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION");
316 static_assert(static_cast<int32_t>(OperationType::LOGISTIC) == ANEURALNETWORKS_LOGISTIC,
317               "OperationType::LOGISTIC != ANEURALNETWORKS_LOGISTIC");
318 static_assert(static_cast<int32_t>(OperationType::LSH_PROJECTION) == ANEURALNETWORKS_LSH_PROJECTION,
319               "OperationType::LSH_PROJECTION != ANEURALNETWORKS_LSH_PROJECTION");
320 static_assert(static_cast<int32_t>(OperationType::LSTM) == ANEURALNETWORKS_LSTM,
321               "OperationType::LSTM != ANEURALNETWORKS_LSTM");
322 static_assert(static_cast<int32_t>(OperationType::MAX_POOL_2D) == ANEURALNETWORKS_MAX_POOL_2D,
323               "OperationType::MAX_POOL_2D != ANEURALNETWORKS_MAX_POOL_2D");
324 static_assert(static_cast<int32_t>(OperationType::MUL) == ANEURALNETWORKS_MUL,
325               "OperationType::MUL != ANEURALNETWORKS_MUL");
326 static_assert(static_cast<int32_t>(OperationType::RELU) == ANEURALNETWORKS_RELU,
327               "OperationType::RELU != ANEURALNETWORKS_RELU");
328 static_assert(static_cast<int32_t>(OperationType::RELU1) == ANEURALNETWORKS_RELU1,
329               "OperationType::RELU1 != ANEURALNETWORKS_RELU1");
330 static_assert(static_cast<int32_t>(OperationType::RELU6) == ANEURALNETWORKS_RELU6,
331               "OperationType::RELU6 != ANEURALNETWORKS_RELU6");
332 static_assert(static_cast<int32_t>(OperationType::RESHAPE) == ANEURALNETWORKS_RESHAPE,
333               "OperationType::RESHAPE != ANEURALNETWORKS_RESHAPE");
334 static_assert(static_cast<int32_t>(OperationType::RESIZE_BILINEAR) ==
335                       ANEURALNETWORKS_RESIZE_BILINEAR,
336               "OperationType::RESIZE_BILINEAR != ANEURALNETWORKS_RESIZE_BILINEAR");
337 static_assert(static_cast<int32_t>(OperationType::RNN) == ANEURALNETWORKS_RNN,
338               "OperationType::RNN != ANEURALNETWORKS_RNN");
339 static_assert(static_cast<int32_t>(OperationType::SOFTMAX) == ANEURALNETWORKS_SOFTMAX,
340               "OperationType::SOFTMAX != ANEURALNETWORKS_SOFTMAX");
341 static_assert(static_cast<int32_t>(OperationType::SPACE_TO_DEPTH) == ANEURALNETWORKS_SPACE_TO_DEPTH,
342               "OperationType::SPACE_TO_DEPTH != ANEURALNETWORKS_SPACE_TO_DEPTH");
343 static_assert(static_cast<int32_t>(OperationType::SVDF) == ANEURALNETWORKS_SVDF,
344               "OperationType::SVDF != ANEURALNETWORKS_SVDF");
345 static_assert(static_cast<int32_t>(OperationType::TANH) == ANEURALNETWORKS_TANH,
346               "OperationType::TANH != ANEURALNETWORKS_TANH");
347 
348 static_assert(static_cast<int32_t>(FusedActivationFunc::NONE) == ANEURALNETWORKS_FUSED_NONE,
349               "FusedActivationFunc::NONE != ANEURALNETWORKS_FUSED_NONE");
350 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU) == ANEURALNETWORKS_FUSED_RELU,
351               "FusedActivationFunc::RELU != ANEURALNETWORKS_FUSED_RELU");
352 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU1) == ANEURALNETWORKS_FUSED_RELU1,
353               "FusedActivationFunc::RELU1 != ANEURALNETWORKS_FUSED_RELU1");
354 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU6) == ANEURALNETWORKS_FUSED_RELU6,
355               "FusedActivationFunc::RELU6 != ANEURALNETWORKS_FUSED_RELU6");
356 
357 // Make sure that the constants are compatible with the values defined in
358 // hardware/interfaces/neuralnetworks/1.1/types.hal.
359 static_assert(static_cast<int32_t>(OperationType::BATCH_TO_SPACE_ND) ==
360                       ANEURALNETWORKS_BATCH_TO_SPACE_ND,
361               "OperationType::BATCH_TO_SPACE_ND != ANEURALNETWORKS_BATCH_TO_SPACE_ND");
362 static_assert(static_cast<int32_t>(OperationType::DIV) == ANEURALNETWORKS_DIV,
363               "OperationType::DIV != ANEURALNETWORKS_DIV");
364 static_assert(static_cast<int32_t>(OperationType::MEAN) == ANEURALNETWORKS_MEAN,
365               "OperationType::MEAN != ANEURALNETWORKS_MEAN");
366 static_assert(static_cast<int32_t>(OperationType::PAD) == ANEURALNETWORKS_PAD,
367               "OperationType::PAD != ANEURALNETWORKS_PAD");
368 static_assert(static_cast<int32_t>(OperationType::SPACE_TO_BATCH_ND) ==
369                       ANEURALNETWORKS_SPACE_TO_BATCH_ND,
370               "OperationType::SPACE_TO_BATCH_ND != ANEURALNETWORKS_SPACE_TO_BATCH_ND");
371 static_assert(static_cast<int32_t>(OperationType::SQUEEZE) == ANEURALNETWORKS_SQUEEZE,
372               "OperationType::SQUEEZE != ANEURALNETWORKS_SQUEEZE");
373 static_assert(static_cast<int32_t>(OperationType::STRIDED_SLICE) == ANEURALNETWORKS_STRIDED_SLICE,
374               "OperationType::STRIDED_SLICE != ANEURALNETWORKS_STRIDED_SLICE");
375 static_assert(static_cast<int32_t>(OperationType::SUB) == ANEURALNETWORKS_SUB,
376               "OperationType::SUB != ANEURALNETWORKS_SUB");
377 static_assert(static_cast<int32_t>(OperationType::TRANSPOSE) == ANEURALNETWORKS_TRANSPOSE,
378               "OperationType::TRANSPOSE != ANEURALNETWORKS_TRANSPOSE");
379 
380 // Make sure that the constants are compatible with the values defined in
381 // hardware/interfaces/neuralnetworks/1.2/types.hal.
382 static_assert(static_cast<int32_t>(OperandType::BOOL) == ANEURALNETWORKS_BOOL,
383               "BOOL != ANEURALNETWORKS_BOOL");
384 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT16_SYMM) ==
385                       ANEURALNETWORKS_TENSOR_QUANT16_SYMM,
386               "TENSOR_QUANT16_SYMM != ANEURALNETWORKS_TENSOR_QUANT16_SYMM");
387 static_assert(static_cast<int32_t>(OperandType::TENSOR_FLOAT16) == ANEURALNETWORKS_TENSOR_FLOAT16,
388               "TENSOR_FLOAT16 != ANEURALNETWORKS_TENSOR_FLOAT16");
389 static_assert(static_cast<int32_t>(OperandType::TENSOR_BOOL8) == ANEURALNETWORKS_TENSOR_BOOL8,
390               "TENSOR_BOOL8 != ANEURALNETWORKS_TENSOR_BOOL8");
391 static_assert(static_cast<int32_t>(OperandType::FLOAT16) == ANEURALNETWORKS_FLOAT16,
392               "FLOAT16 != ANEURALNETWORKS_FLOAT16");
393 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) ==
394                       ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL,
395               "TENSOR_QUANT8_SYMM_PER_CHANNEL != ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL");
396 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT16_ASYMM) ==
397                       ANEURALNETWORKS_TENSOR_QUANT16_ASYMM,
398               "TENSOR_QUANT16_ASYMM != ANEURALNETWORKS_TENSOR_QUANT16_ASYMM");
399 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_SYMM) ==
400                       ANEURALNETWORKS_TENSOR_QUANT8_SYMM,
401               "TENSOR_QUANT8_SYMM != ANEURALNETWORKS_TENSOR_QUANT8_SYMM");
402 
403 static_assert(static_cast<int32_t>(OperationType::ABS) == ANEURALNETWORKS_ABS,
404               "OperationType::ABS != ANEURALNETWORKS_ABS");
405 static_assert(static_cast<int32_t>(OperationType::ARGMAX) == ANEURALNETWORKS_ARGMAX,
406               "OperationType::ARGMAX != ANEURALNETWORKS_ARGMAX");
407 static_assert(static_cast<int32_t>(OperationType::ARGMIN) == ANEURALNETWORKS_ARGMIN,
408               "OperationType::ARGMIN != ANEURALNETWORKS_ARGMIN");
409 static_assert(static_cast<int32_t>(OperationType::AXIS_ALIGNED_BBOX_TRANSFORM) ==
410                       ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM,
411               "OperationType::AXIS_ALIGNED_BBOX_TRANSFORM != "
412               "ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM");
413 static_assert(static_cast<int32_t>(OperationType::BIDIRECTIONAL_SEQUENCE_LSTM) ==
414                       ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM,
415               "OperationType::BIDIRECTIONAL_SEQUENCE_LSTM != "
416               "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM");
417 static_assert(
418         static_cast<int32_t>(OperationType::BIDIRECTIONAL_SEQUENCE_RNN) ==
419                 ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN,
420         "OperationType::BIDIRECTIONAL_SEQUENCE_RNN != ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN");
421 static_assert(static_cast<int32_t>(OperationType::BOX_WITH_NMS_LIMIT) ==
422                       ANEURALNETWORKS_BOX_WITH_NMS_LIMIT,
423               "OperationType::BOX_WITH_NMS_LIMIT != ANEURALNETWORKS_BOX_WITH_NMS_LIMIT");
424 static_assert(static_cast<int32_t>(OperationType::CAST) == ANEURALNETWORKS_CAST,
425               "OperationType::CAST != ANEURALNETWORKS_CAST");
426 static_assert(static_cast<int32_t>(OperationType::CHANNEL_SHUFFLE) ==
427                       ANEURALNETWORKS_CHANNEL_SHUFFLE,
428               "OperationType::CHANNEL_SHUFFLE != ANEURALNETWORKS_CHANNEL_SHUFFLE");
429 static_assert(
430         static_cast<int32_t>(OperationType::DETECTION_POSTPROCESSING) ==
431                 ANEURALNETWORKS_DETECTION_POSTPROCESSING,
432         "OperationType::DETECTION_POSTPROCESSING != ANEURALNETWORKS_DETECTION_POSTPROCESSING");
433 static_assert(static_cast<int32_t>(OperationType::EQUAL) == ANEURALNETWORKS_EQUAL,
434               "OperationType::EQUAL != ANEURALNETWORKS_EQUAL");
435 static_assert(static_cast<int32_t>(OperationType::EXP) == ANEURALNETWORKS_EXP,
436               "OperationType::EXP != ANEURALNETWORKS_EXP");
437 static_assert(static_cast<int32_t>(OperationType::EXPAND_DIMS) == ANEURALNETWORKS_EXPAND_DIMS,
438               "OperationType::EXPAND_DIMS != ANEURALNETWORKS_EXPAND_DIMS");
439 static_assert(static_cast<int32_t>(OperationType::GATHER) == ANEURALNETWORKS_GATHER,
440               "OperationType::GATHER != ANEURALNETWORKS_GATHER");
441 static_assert(static_cast<int32_t>(OperationType::GENERATE_PROPOSALS) ==
442                       ANEURALNETWORKS_GENERATE_PROPOSALS,
443               "OperationType::GENERATE_PROPOSALS != ANEURALNETWORKS_GENERATE_PROPOSALS");
444 static_assert(static_cast<int32_t>(OperationType::GREATER) == ANEURALNETWORKS_GREATER,
445               "OperationType::GREATER != ANEURALNETWORKS_GREATER");
446 static_assert(static_cast<int32_t>(OperationType::GREATER_EQUAL) == ANEURALNETWORKS_GREATER_EQUAL,
447               "OperationType::GREATER_EQUAL != ANEURALNETWORKS_GREATER_EQUAL");
448 static_assert(static_cast<int32_t>(OperationType::GROUPED_CONV_2D) ==
449                       ANEURALNETWORKS_GROUPED_CONV_2D,
450               "OperationType::GROUPED_CONV_2D != ANEURALNETWORKS_GROUPED_CONV_2D");
451 static_assert(static_cast<int32_t>(OperationType::HEATMAP_MAX_KEYPOINT) ==
452                       ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT,
453               "OperationType::HEATMAP_MAX_KEYPOINT != ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT");
454 static_assert(static_cast<int32_t>(OperationType::INSTANCE_NORMALIZATION) ==
455                       ANEURALNETWORKS_INSTANCE_NORMALIZATION,
456               "OperationType::INSTANCE_NORMALIZATION != ANEURALNETWORKS_INSTANCE_NORMALIZATION");
457 static_assert(static_cast<int32_t>(OperationType::LESS) == ANEURALNETWORKS_LESS,
458               "OperationType::LESS != ANEURALNETWORKS_LESS");
459 static_assert(static_cast<int32_t>(OperationType::LESS_EQUAL) == ANEURALNETWORKS_LESS_EQUAL,
460               "OperationType::LESS_EQUAL != ANEURALNETWORKS_LESS_EQUAL");
461 static_assert(static_cast<int32_t>(OperationType::LOG) == ANEURALNETWORKS_LOG,
462               "OperationType::LOG != ANEURALNETWORKS_LOG");
463 static_assert(static_cast<int32_t>(OperationType::LOGICAL_AND) == ANEURALNETWORKS_LOGICAL_AND,
464               "OperationType::LOGICAL_AND != ANEURALNETWORKS_LOGICAL_AND");
465 static_assert(static_cast<int32_t>(OperationType::LOGICAL_NOT) == ANEURALNETWORKS_LOGICAL_NOT,
466               "OperationType::LOGICAL_NOT != ANEURALNETWORKS_LOGICAL_NOT");
467 static_assert(static_cast<int32_t>(OperationType::LOGICAL_OR) == ANEURALNETWORKS_LOGICAL_OR,
468               "OperationType::LOGICAL_OR != ANEURALNETWORKS_LOGICAL_OR");
469 static_assert(static_cast<int32_t>(OperationType::LOG_SOFTMAX) == ANEURALNETWORKS_LOG_SOFTMAX,
470               "OperationType::LOG_SOFTMAX != ANEURALNETWORKS_LOG_SOFTMAX");
471 static_assert(static_cast<int32_t>(OperationType::MAXIMUM) == ANEURALNETWORKS_MAXIMUM,
472               "OperationType::MAXIMUM != ANEURALNETWORKS_MAXIMUM");
473 static_assert(static_cast<int32_t>(OperationType::MINIMUM) == ANEURALNETWORKS_MINIMUM,
474               "OperationType::MINIMUM != ANEURALNETWORKS_MINIMUM");
475 static_assert(static_cast<int32_t>(OperationType::NEG) == ANEURALNETWORKS_NEG,
476               "OperationType::NEG != ANEURALNETWORKS_NEG");
477 static_assert(static_cast<int32_t>(OperationType::NOT_EQUAL) == ANEURALNETWORKS_NOT_EQUAL,
478               "OperationType::NOT_EQUAL != ANEURALNETWORKS_NOT_EQUAL");
479 static_assert(static_cast<int32_t>(OperationType::PAD_V2) == ANEURALNETWORKS_PAD_V2,
480               "OperationType::PAD_V2 != ANEURALNETWORKS_PAD_V2");
481 static_assert(static_cast<int32_t>(OperationType::POW) == ANEURALNETWORKS_POW,
482               "OperationType::POW != ANEURALNETWORKS_POW");
483 static_assert(static_cast<int32_t>(OperationType::PRELU) == ANEURALNETWORKS_PRELU,
484               "OperationType::PRELU != ANEURALNETWORKS_PRELU");
485 static_assert(static_cast<int32_t>(OperationType::QUANTIZE) == ANEURALNETWORKS_QUANTIZE,
486               "OperationType::QUANTIZE != ANEURALNETWORKS_QUANTIZE");
487 static_assert(static_cast<int32_t>(OperationType::QUANTIZED_16BIT_LSTM) ==
488                       ANEURALNETWORKS_QUANTIZED_16BIT_LSTM,
489               "OperationType::QUANTIZED_16BIT_LSTM != ANEURALNETWORKS_QUANTIZED_16BIT_LSTM");
490 static_assert(static_cast<int32_t>(OperationType::RANDOM_MULTINOMIAL) ==
491                       ANEURALNETWORKS_RANDOM_MULTINOMIAL,
492               "OperationType::RANDOM_MULTINOMIAL != ANEURALNETWORKS_RANDOM_MULTINOMIAL");
493 static_assert(static_cast<int32_t>(OperationType::REDUCE_ALL) == ANEURALNETWORKS_REDUCE_ALL,
494               "OperationType::REDUCE_ALL != ANEURALNETWORKS_REDUCE_ALL");
495 static_assert(static_cast<int32_t>(OperationType::REDUCE_ANY) == ANEURALNETWORKS_REDUCE_ANY,
496               "OperationType::REDUCE_ANY != ANEURALNETWORKS_REDUCE_ANY");
497 static_assert(static_cast<int32_t>(OperationType::REDUCE_MAX) == ANEURALNETWORKS_REDUCE_MAX,
498               "OperationType::REDUCE_MAX != ANEURALNETWORKS_REDUCE_MAX");
499 static_assert(static_cast<int32_t>(OperationType::REDUCE_MIN) == ANEURALNETWORKS_REDUCE_MIN,
500               "OperationType::REDUCE_MIN != ANEURALNETWORKS_REDUCE_MIN");
501 static_assert(static_cast<int32_t>(OperationType::REDUCE_PROD) == ANEURALNETWORKS_REDUCE_PROD,
502               "OperationType::REDUCE_PROD != ANEURALNETWORKS_REDUCE_PROD");
503 static_assert(static_cast<int32_t>(OperationType::REDUCE_SUM) == ANEURALNETWORKS_REDUCE_SUM,
504               "OperationType::REDUCE_SUM != ANEURALNETWORKS_REDUCE_SUM");
505 static_assert(static_cast<int32_t>(OperationType::ROI_ALIGN) == ANEURALNETWORKS_ROI_ALIGN,
506               "OperationType::ROI_ALIGN != ANEURALNETWORKS_ROI_ALIGN");
507 static_assert(static_cast<int32_t>(OperationType::ROI_POOLING) == ANEURALNETWORKS_ROI_POOLING,
508               "OperationType::ROI_POOLING != ANEURALNETWORKS_ROI_POOLING");
509 static_assert(static_cast<int32_t>(OperationType::RSQRT) == ANEURALNETWORKS_RSQRT,
510               "OperationType::RSQRT != ANEURALNETWORKS_RSQRT");
511 static_assert(static_cast<int32_t>(OperationType::SELECT) == ANEURALNETWORKS_SELECT,
512               "OperationType::SELECT != ANEURALNETWORKS_SELECT");
513 static_assert(static_cast<int32_t>(OperationType::SIN) == ANEURALNETWORKS_SIN,
514               "OperationType::SIN != ANEURALNETWORKS_SIN");
515 static_assert(static_cast<int32_t>(OperationType::SLICE) == ANEURALNETWORKS_SLICE,
516               "OperationType::SLICE != ANEURALNETWORKS_SLICE");
517 static_assert(static_cast<int32_t>(OperationType::SPLIT) == ANEURALNETWORKS_SPLIT,
518               "OperationType::SPLIT != ANEURALNETWORKS_SPLIT");
519 static_assert(static_cast<int32_t>(OperationType::SQRT) == ANEURALNETWORKS_SQRT,
520               "OperationType::SQRT != ANEURALNETWORKS_SQRT");
521 static_assert(static_cast<int32_t>(OperationType::TILE) == ANEURALNETWORKS_TILE,
522               "OperationType::TILE != ANEURALNETWORKS_TILE");
523 static_assert(static_cast<int32_t>(OperationType::TOPK_V2) == ANEURALNETWORKS_TOPK_V2,
524               "OperationType::TOPK_V2 != ANEURALNETWORKS_TOPK_V2");
525 static_assert(static_cast<int32_t>(OperationType::TRANSPOSE_CONV_2D) ==
526                       ANEURALNETWORKS_TRANSPOSE_CONV_2D,
527               "OperationType::TRANSPOSE_CONV_2D != ANEURALNETWORKS_TRANSPOSE_CONV_2D");
528 static_assert(static_cast<int32_t>(OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM) ==
529                       ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM,
530               "OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM != "
531               "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM");
532 static_assert(static_cast<int32_t>(OperationType::UNIDIRECTIONAL_SEQUENCE_RNN) ==
533                       ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN,
534               "OperationType::UNIDIRECTIONAL_SEQUENCE_RNN != "
535               "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN");
536 static_assert(static_cast<int32_t>(OperationType::RESIZE_NEAREST_NEIGHBOR) ==
537                       ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR,
538               "OperationType::RESIZE_NEAREST_NEIGHBOR != ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR");
539 static_assert(static_cast<int32_t>(OperationType::QUANTIZED_LSTM) == ANEURALNETWORKS_QUANTIZED_LSTM,
540               "OperationType::QUANTIZED_LSTM != ANEURALNETWORKS_QUANTIZED_LSTM");
541 static_assert(static_cast<int32_t>(OperationType::IF) == ANEURALNETWORKS_IF,
542               "OperationType::IF != ANEURALNETWORKS_IF");
543 static_assert(static_cast<int32_t>(OperationType::WHILE) == ANEURALNETWORKS_WHILE,
544               "OperationType::WHILE != ANEURALNETWORKS_WHILE");
545 static_assert(static_cast<int32_t>(OperationType::ELU) == ANEURALNETWORKS_ELU,
546               "OperationType::ELU != ANEURALNETWORKS_ELU");
547 static_assert(static_cast<int32_t>(OperationType::HARD_SWISH) == ANEURALNETWORKS_HARD_SWISH,
548               "OperationType::HARD_SWISH != ANEURALNETWORKS_HARD_SWISH");
549 static_assert(static_cast<int32_t>(OperationType::FILL) == ANEURALNETWORKS_FILL,
550               "OperationType::FILL != ANEURALNETWORKS_FILL");
551 static_assert(static_cast<int32_t>(OperationType::RANK) == ANEURALNETWORKS_RANK,
552               "OperationType::RANK != ANEURALNETWORKS_RANK");
553 
554 static_assert(static_cast<int32_t>(DeviceType::OTHER) == ANEURALNETWORKS_DEVICE_OTHER,
555               "DeviceType::OTHER != ANEURALNETWORKS_DEVICE_OTHER");
556 static_assert(static_cast<int32_t>(DeviceType::CPU) == ANEURALNETWORKS_DEVICE_CPU,
557               "DeviceType::CPU != ANEURALNETWORKS_DEVICE_CPU");
558 static_assert(static_cast<int32_t>(DeviceType::GPU) == ANEURALNETWORKS_DEVICE_GPU,
559               "DeviceType::GPU != ANEURALNETWORKS_DEVICE_GPU");
560 static_assert(static_cast<int32_t>(DeviceType::ACCELERATOR) == ANEURALNETWORKS_DEVICE_ACCELERATOR,
561               "DeviceType::ACCELERATOR != ANEURALNETWORKS_DEVICE_ACCELERATOR");
562 
563 // Make sure that the constants are compatible with the values defined in
564 // hardware/interfaces/neuralnetworks/1.3/types.hal.
565 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_LOW) ==
566                       Priority::LOW,
567               "ANEURALNETWORKS_PRIORITY_LOW does not map to Priority::LOW");
568 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_MEDIUM) ==
569                       Priority::MEDIUM,
570               "ANEURALNETWORKS_PRIORITY_MEDIUM does not map to Priority::MEDIUM");
571 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_HIGH) ==
572                       Priority::HIGH,
573               "ANEURALNETWORKS_PRIORITY_HIGH does not map to Priority::HIGH");
574 
575 // Asserts for ANeuralNetworksOperandType memory layout
576 static_assert(offsetof(ANeuralNetworksOperandType, type) == 0,
577               "ANeuralNetworksOperandType.type offset != 0");
578 static_assert(offsetof(ANeuralNetworksOperandType, dimensionCount) == 4,
579               "ANeuralNetworksOperandType.dimensionCount offset != 4");
580 static_assert(offsetof(ANeuralNetworksOperandType, dimensions) == 8,
581               "ANeuralNetworksOperandType.dimensions offset != 8");
582 static_assert(offsetof(ANeuralNetworksOperandType, scale) == 8 + sizeof(void*),
583               "ANeuralNetworksOperandType.scale offset != 8 + sizeof(void*)");
584 static_assert(offsetof(ANeuralNetworksOperandType, zeroPoint) == 12 + sizeof(void*),
585               "ANeuralNetworksOperandType.zeroPoint offset != 12 + sizeof(void*)");
586 static_assert(sizeof(ANeuralNetworksOperandType) == 16 + sizeof(void*),
587               "ANeuralNetworksOperandType size changed");
588 static_assert(alignof(ANeuralNetworksOperandType) == alignof(void*),
589               "ANeuralNetworksOperandType alignment changed");
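
// Illustrative sketch (not part of the original file): populating an ANeuralNetworksOperandType
// consistent with the layout asserted above. The dimensions, scale, and zero point are made-up
// example values.
//
//     const uint32_t dims[4] = {1, 224, 224, 3};
//     ANeuralNetworksOperandType tensorType;
//     tensorType.type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM;
//     tensorType.dimensionCount = 4;
//     tensorType.dimensions = dims;
//     tensorType.scale = 0.0078125f;  // quantization scale
//     tensorType.zeroPoint = 128;     // quantization zero point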
590 
591 // Asserts for ANeuralNetworksSymmPerChannelQuantParams memory layout
592 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, channelDim) == 0,
593               "ANeuralNetworksSymmPerChannelQuantParams.channelDim offset != 0");
594 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, scaleCount) == 4,
595               "ANeuralNetworksSymmPerChannelQuantParams.scaleCount offset != 4");
596 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, scales) == 8,
597               "ANeuralNetworksSymmPerChannelQuantParams.scales offset != 8");
598 static_assert(sizeof(ANeuralNetworksSymmPerChannelQuantParams) == 8 + sizeof(void*),
599               "ANeuralNetworksSymmPerChannelQuantParams size != 8 + sizeof(void*)");
600 static_assert(alignof(ANeuralNetworksSymmPerChannelQuantParams) == alignof(void*),
601               "ANeuralNetworksSymmPerChannelQuantParams alignment changed");
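
// Illustrative sketch (not part of the original file): filling in per-channel quantization
// parameters matching the layout asserted above; the scale values are made-up examples. The
// struct is typically passed to ANeuralNetworksModel_setOperandSymmPerChannelQuantParams().
//
//     const float channelScales[3] = {0.5f, 0.25f, 0.125f};
//     ANeuralNetworksSymmPerChannelQuantParams quant;
//     quant.channelDim = 0;    // dimension along which the scales apply
//     quant.scaleCount = 3;    // must equal dimensions[channelDim] of the operand
//     quant.scales = channelScales;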
602 
603 // Asserts for compilation caching
604 static_assert(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN == 32,
605               "ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN has changed");
606 static_assert(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN == kByteSizeOfCacheToken,
607               "ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN != kByteSizeOfCacheToken");
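
// Usage sketch (illustrative, not part of the original file): opting in to compilation caching
// with a caller-generated token of ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN bytes. `compilation`
// and `cacheDir` are assumed to exist; the token should normally be a stable hash of the model.
//
//     uint8_t token[ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN] = {};
//     int status = ANeuralNetworksCompilation_setCaching(compilation, cacheDir, token);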
608 
609 // Asserts for compilation priority
610 static_assert(ANEURALNETWORKS_PRIORITY_LOW == 90, "ANEURALNETWORKS_PRIORITY_LOW has changed");
611 static_assert(ANEURALNETWORKS_PRIORITY_MEDIUM == 100,
612               "ANEURALNETWORKS_PRIORITY_MEDIUM has changed");
613 static_assert(ANEURALNETWORKS_PRIORITY_HIGH == 110, "ANEURALNETWORKS_PRIORITY_HIGH has changed");
614 static_assert(ANEURALNETWORKS_PRIORITY_DEFAULT == ANEURALNETWORKS_PRIORITY_MEDIUM,
615               "ANEURALNETWORKS_PRIORITY_DEFAULT has changed");
616 
617 // Asserts for feature levels
618 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_1 == 27, "ANEURALNETWORKS_FEATURE_LEVEL_1 has changed");
619 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_2 == 28, "ANEURALNETWORKS_FEATURE_LEVEL_2 has changed");
620 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_3 == 29, "ANEURALNETWORKS_FEATURE_LEVEL_3 has changed");
621 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_4 == 30, "ANEURALNETWORKS_FEATURE_LEVEL_4 has changed");
622 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_5 == 31, "ANEURALNETWORKS_FEATURE_LEVEL_5 has changed");
623 
624 #ifdef NN_COMPATIBILITY_LIBRARY_BUILD
625 
626 static_assert(sizeof(SL_ANeuralNetworksPerformanceInfo) == sizeof(float) * 2,
627               "SL_ANeuralNetworksPerformanceInfo size changed");
628 static_assert(sizeof(SL_ANeuralNetworksOperandPerformanceInfo) ==
629                       sizeof(float) * 2 + sizeof(int32_t),
630               "SL_ANeuralNetworksOperandPerformanceInfo size changed");
631 static_assert(sizeof(SL_ANeuralNetworksExtensionOperandTypeInformation) == 8,
632               "SL_ANeuralNetworksExtensionOperandTypeInformation size changed");
633 
634 static_assert(SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR == 0,
635               "SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR has changed");
636 static_assert(SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR == 1,
637               "SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR has changed");
638 static_assert(SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_IF == 2,
639               "SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_IF has changed");
640 static_assert(SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_WHILE == 3,
641               "SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_WHILE has changed");
642 
643 #endif  // NN_COMPATIBILITY_LIBRARY_BUILD
644 
645 int ANeuralNetworks_getDeviceCount(uint32_t* numDevices) {
646     if (numDevices == nullptr) {
647         LOG(ERROR) << "ANeuralNetworks_getDeviceCount passed a nullptr";
648         return ANEURALNETWORKS_UNEXPECTED_NULL;
649     }
650     *numDevices = DeviceManager::get()->getDrivers().size();
651     return ANEURALNETWORKS_NO_ERROR;
652 }
653 
654 int ANeuralNetworks_getDevice(uint32_t devIndex, ANeuralNetworksDevice** device) {
655     if (device == nullptr) {
656         LOG(ERROR) << "ANeuralNetworks_getDevice passed a nullptr";
657         return ANEURALNETWORKS_UNEXPECTED_NULL;
658     }
659     const std::vector<std::shared_ptr<Device>>& devices = DeviceManager::get()->getDrivers();
660     if (devIndex >= devices.size()) {
661         LOG(ERROR) << "ANeuralNetworks_getDevice passed an invalid device index";
662         return ANEURALNETWORKS_BAD_DATA;
663     }
664     *device = reinterpret_cast<ANeuralNetworksDevice*>(devices.at(devIndex).get());
665     return ANEURALNETWORKS_NO_ERROR;
666 }
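
// Usage sketch (illustrative, not part of the original file): enumerating the available devices
// through the two entry points above. Returned device handles are owned by the runtime and must
// not be freed by the caller.
//
//     uint32_t numDevices = 0;
//     if (ANeuralNetworks_getDeviceCount(&numDevices) == ANEURALNETWORKS_NO_ERROR) {
//         for (uint32_t i = 0; i < numDevices; ++i) {
//             ANeuralNetworksDevice* device = nullptr;
//             if (ANeuralNetworks_getDevice(i, &device) != ANEURALNETWORKS_NO_ERROR) continue;
//             // Query or select `device` here.
//         }
//     }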
667 
668 int ANeuralNetworksDevice_getName(const ANeuralNetworksDevice* device, const char** name) {
669     if (device == nullptr || name == nullptr) {
670         LOG(ERROR) << "ANeuralNetworksDevice_getName passed a nullptr";
671         return ANEURALNETWORKS_UNEXPECTED_NULL;
672     }
673     const Device* d = reinterpret_cast<const Device*>(device);
674     *name = d->getName().c_str();
675     return ANEURALNETWORKS_NO_ERROR;
676 }
677 
678 int ANeuralNetworksDevice_getVersion(const ANeuralNetworksDevice* device, const char** version) {
679     if (device == nullptr || version == nullptr) {
680         LOG(ERROR) << "ANeuralNetworksDevice_getVersion passed a nullptr";
681         return ANEURALNETWORKS_UNEXPECTED_NULL;
682     }
683     const Device* d = reinterpret_cast<const Device*>(device);
684     *version = d->getVersionString().c_str();
685     return ANEURALNETWORKS_NO_ERROR;
686 }
687 
688 int ANeuralNetworksDevice_getType(const ANeuralNetworksDevice* device, int32_t* type) {
689     if (device == nullptr || type == nullptr) {
690         LOG(ERROR) << "ANeuralNetworksDevice_getType passed a nullptr";
691         return ANEURALNETWORKS_UNEXPECTED_NULL;
692     }
693     const Device* d = reinterpret_cast<const Device*>(device);
694     int32_t dType = d->getType();
695     if (dType < 0) {
696         return ANEURALNETWORKS_OP_FAILED;
697     }
698     *type = d->getType();
699     return ANEURALNETWORKS_NO_ERROR;
700 }
701 
702 int ANeuralNetworksDevice_getFeatureLevel(const ANeuralNetworksDevice* device,
703                                           int64_t* featureLevel) {
704     if (device == nullptr || featureLevel == nullptr) {
705         LOG(ERROR) << "ANeuralNetworksDevice_getFeatureLevel passed a nullptr";
706         return ANEURALNETWORKS_UNEXPECTED_NULL;
707     }
708     Device* d = reinterpret_cast<Device*>(const_cast<ANeuralNetworksDevice*>(device));
709     int64_t dFeatureLevel = d->getFeatureLevel();
710     if (dFeatureLevel < 0) {
711         return ANEURALNETWORKS_BAD_STATE;
712     }
713     *featureLevel = dFeatureLevel;
714     return ANEURALNETWORKS_NO_ERROR;
715 }
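
// Usage sketch (illustrative, not part of the original file): querying the introspection entry
// points above for a device obtained from ANeuralNetworks_getDevice(). `device` is assumed valid.
//
//     const char* name = nullptr;
//     const char* version = nullptr;
//     int32_t type = 0;
//     int64_t featureLevel = 0;
//     ANeuralNetworksDevice_getName(device, &name);
//     ANeuralNetworksDevice_getVersion(device, &version);
//     ANeuralNetworksDevice_getType(device, &type);
//     ANeuralNetworksDevice_getFeatureLevel(device, &featureLevel);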
716 
717 int ANeuralNetworksDevice_wait(const ANeuralNetworksDevice* device) {
718     if (device == nullptr) {
719         LOG(ERROR) << "ANeuralNetworksDevice_wait passed a nullptr";
720         return ANEURALNETWORKS_UNEXPECTED_NULL;
721     }
722     const Device* d = reinterpret_cast<const Device*>(device);
723     return d->wait();
724 }
725 
726 int ANeuralNetworksModel_getSupportedOperationsForDevices(
727         const ANeuralNetworksModel* model, const ANeuralNetworksDevice* const* devices,
728         uint32_t numDevices, bool* supportedOps) {
729     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksModel_getSupportedOperationsForDevices");
730     if (model == nullptr || devices == nullptr || supportedOps == nullptr) {
731         LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed a nullptr";
732         return ANEURALNETWORKS_UNEXPECTED_NULL;
733     }
734     if (numDevices == 0) {
735         LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed an empty "
736                       "device list";
737         return ANEURALNETWORKS_BAD_DATA;
738     }
739     const ModelBuilder* m = reinterpret_cast<const ModelBuilder*>(model);
740     if (!m->isFinished() || !m->isValid()) {
741         LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed an unfinished "
742                       "or invalid Model";
743         return ANEURALNETWORKS_BAD_STATE;
744     }
745 
746     const Model canonicalModel = m->makeModel();
747     const std::vector<uint32_t>& opMap = m->getSortedOperationMapping();
748     // Initialize the output array to false for all operations.
749     std::fill(supportedOps, supportedOps + opMap.size(), false);
750     for (uint32_t i = 0; i < numDevices; i++) {
751         if (devices[i] == nullptr) {
752             LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed a nullptr "
753                           "as a device";
754             return ANEURALNETWORKS_UNEXPECTED_NULL;
755         }
756         for (uint32_t j = i + 1; j < numDevices; j++) {
757             if (devices[i] == devices[j]) {
758                 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed "
759                               "duplicate devices";
760                 return ANEURALNETWORKS_BAD_DATA;
761             }
762         }
763 
764         Device* d = reinterpret_cast<Device*>(const_cast<ANeuralNetworksDevice*>(devices[i]));
765         const MetaModel metaModel(canonicalModel, DeviceManager::get()->strictSlicing());
766         const std::vector<bool> supportsByDevice = d->getSupportedOperations(metaModel);
767         for (uint32_t j = 0; j < supportsByDevice.size(); j++) {
768             uint32_t originalIdx = opMap[j];
769             supportedOps[originalIdx] |= supportsByDevice[j];
770         }
771     }
772     return ANEURALNETWORKS_NO_ERROR;
773 }
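
// Usage sketch (illustrative, not part of the original file): checking which operations of a
// finished model are supported by a set of devices. `model`, `devices`, `numDevices`, and
// `operationCount` (the number of operations added to the model) are assumed to exist.
//
//     std::unique_ptr<bool[]> supported(new bool[operationCount]);
//     int status = ANeuralNetworksModel_getSupportedOperationsForDevices(
//             model, devices, numDevices, supported.get());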
774 
775 int ANeuralNetworksCompilation_createForDevices(ANeuralNetworksModel* model,
776                                                 const ANeuralNetworksDevice* const* devices,
777                                                 uint32_t numDevices,
778                                                 ANeuralNetworksCompilation** compilation) {
779     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_createForDevices");
780     if (model == nullptr || devices == nullptr || compilation == nullptr) {
781         LOG(ERROR) << "ANeuralNetworksCompilation_createForDevices passed a nullptr";
782         return ANEURALNETWORKS_UNEXPECTED_NULL;
783     }
784 
785     if (numDevices == 0) {
786         LOG(ERROR) << "ANeuralNetworksCompilation_createForDevices passed an empty device list";
787         return ANEURALNETWORKS_BAD_DATA;
788     }
789 
790     std::vector<std::shared_ptr<Device>> selectedDevices;
791     for (uint32_t i = 0; i < numDevices; i++) {
792         if (devices[i] == nullptr) {
793             LOG(ERROR)
794                     << "ANeuralNetworksCompilation_createForDevices passed a nullptr as a device";
795             return ANEURALNETWORKS_UNEXPECTED_NULL;
796         }
797         for (uint32_t j = i + 1; j < numDevices; j++) {
798             if (devices[i] == devices[j]) {
799                 LOG(ERROR)
800                         << "ANeuralNetworksCompilation_createForDevices passed duplicate devices";
801                 return ANEURALNETWORKS_BAD_DATA;
802             }
803         }
804         for (auto& device : DeviceManager::get()->getDrivers()) {
805             if (device.get() == reinterpret_cast<const Device*>(devices[i])) {
806                 // Found a match
807                 selectedDevices.push_back(device);
808                 break;
809             }
810         }
811     }
812 
813     if (selectedDevices.size() != numDevices) {
814         LOG(ERROR) << "ANeuralNetworksCompilation_createForDevices passed an invalid device set";
815         return ANEURALNETWORKS_BAD_DATA;
816     }
817     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
818     CompilationBuilder* c = nullptr;
819     // No CPU fallback when user specifies the list of devices manually.
820     int result = m->createCompilation(&c, selectedDevices, /* explicitDeviceList */ true);
821     *compilation = reinterpret_cast<ANeuralNetworksCompilation*>(c);
822     return result;
823 }
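
// Usage sketch (illustrative, not part of the original file): compiling a finished model for an
// explicit device list (note there is no CPU fallback in this path). `model`, `devices`, and
// `numDevices` are assumed to exist.
//
//     ANeuralNetworksCompilation* compilation = nullptr;
//     int status = ANeuralNetworksCompilation_createForDevices(model, devices, numDevices,
//                                                              &compilation);
//     if (status == ANEURALNETWORKS_NO_ERROR) {
//         status = ANeuralNetworksCompilation_finish(compilation);
//     }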
824 
825 int ANeuralNetworksExecution_compute(ANeuralNetworksExecution* execution) {
826     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_compute");
827     if (!execution) {
828         LOG(ERROR) << "ANeuralNetworksExecution_compute passed a nullptr";
829         return ANEURALNETWORKS_UNEXPECTED_NULL;
830     }
831     // TODO validate the rest
832 
833     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
834     return r->computeSynchronously();
835 }
836 
837 int ANeuralNetworksExecution_setMeasureTiming(ANeuralNetworksExecution* execution, bool measure) {
838     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setMeasureTiming");
839     if (!execution) {
840         LOG(ERROR) << "ANeuralNetworksExecution_setMeasureTiming passed a nullptr";
841         return ANEURALNETWORKS_UNEXPECTED_NULL;
842     }
843     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
844     return r->setMeasureTiming(measure);
845 }
846 
847 int ANeuralNetworksExecution_getDuration(const ANeuralNetworksExecution* execution,
848                                          int32_t durationCode, uint64_t* duration) {
849     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getDuration");
850     if (!execution || !duration) {
851         LOG(ERROR) << "ANeuralNetworksExecution_getDuration passed a nullptr";
852         return ANEURALNETWORKS_UNEXPECTED_NULL;
853     }
854     switch (durationCode) {
855         case ANEURALNETWORKS_DURATION_ON_HARDWARE:
856         case ANEURALNETWORKS_DURATION_IN_DRIVER:
857         case ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE:
858         case ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER:
859             break;
860         default:
861             LOG(ERROR) << "ANeuralNetworksExecution_getDuration passed a bad durationCode "
862                        << durationCode;
863             return ANEURALNETWORKS_BAD_DATA;
864     }
865     const ExecutionBuilder* r = reinterpret_cast<const ExecutionBuilder*>(execution);
866     return r->getDuration(durationCode, duration);
867 }
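
// Usage sketch (illustrative only): timing must be requested before the execution
// runs, and durations are read back afterwards. The execution is assumed to have
// its inputs and outputs already configured by the caller.
[[maybe_unused]] static int exampleMeasureDurations(ANeuralNetworksExecution* execution,
                                                    uint64_t* onHardwareNanos,
                                                    uint64_t* inDriverNanos) {
    int status = ANeuralNetworksExecution_setMeasureTiming(execution, true);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksExecution_compute(execution);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksExecution_getDuration(
                execution, ANEURALNETWORKS_DURATION_ON_HARDWARE, onHardwareNanos);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksExecution_getDuration(
                execution, ANEURALNETWORKS_DURATION_IN_DRIVER, inDriverNanos);
    }
    return status;
}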
868 
869 int ANeuralNetworksBurst_create(ANeuralNetworksCompilation* compilation,
870                                 ANeuralNetworksBurst** burst) {
871     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksBurst_create");
872     if (!compilation || !burst) {
873         LOG(ERROR) << "ANeuralNetworksBurst_create passed a nullptr";
874         return ANEURALNETWORKS_UNEXPECTED_NULL;
875     }
876 
877     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
878     BurstBuilder* b = nullptr;
879     int result = c->createBurst(&b);
880     *burst = reinterpret_cast<ANeuralNetworksBurst*>(b);
881     return result;
882 }
883 
884 void ANeuralNetworksBurst_free(ANeuralNetworksBurst* burst) {
885     NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksBurst_free");
886     // No validation.  Free of nullptr is valid.
887     BurstBuilder* b = reinterpret_cast<BurstBuilder*>(burst);
888     delete b;
889 }
890 
891 int ANeuralNetworksExecution_burstCompute(ANeuralNetworksExecution* execution,
892                                           ANeuralNetworksBurst* burst) {
893     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_burstCompute");
894     if (!execution || !burst) {
895         LOG(ERROR) << "ANeuralNetworksExecution_burstCompute passed a nullptr";
896         return ANEURALNETWORKS_UNEXPECTED_NULL;
897     }
898 
899     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
900     BurstBuilder* b = reinterpret_cast<BurstBuilder*>(burst);
901 
902     if (r->getCompilation() != b->getCompilation()) {
903         LOG(ERROR) << "ANeuralNetworksBurst and ANeuralNetworksExecution "
904                       "used in ANeuralNetworksExecution_burstCompute must "
905                       "originate from the same ANeuralNetworksCompilation";
906         return ANEURALNETWORKS_BAD_DATA;
907     }
908 
909     const bool locked = b->tryLock();
910     if (!locked) {
911         LOG(ERROR) << "ANeuralNetworksBurst is already being used in another "
912                       "call to ANeuralNetworksExecution_burstCompute";
913         return ANEURALNETWORKS_BAD_STATE;
914     }
915 
916     const int n = r->burstCompute(b);
917     b->unlock();
918 
919     return n;
920 }
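
// Usage sketch (illustrative only): a burst object is created once per compilation
// and reused across many executions to reduce per-inference overhead. Every
// execution in `executions` is assumed to originate from `compilation` and to be
// fully configured.
[[maybe_unused]] static int exampleRunWithBurst(
        ANeuralNetworksCompilation* compilation,
        const std::vector<ANeuralNetworksExecution*>& executions) {
    ANeuralNetworksBurst* burst = nullptr;
    int status = ANeuralNetworksBurst_create(compilation, &burst);
    for (ANeuralNetworksExecution* execution : executions) {
        if (status != ANEURALNETWORKS_NO_ERROR) {
            break;
        }
        status = ANeuralNetworksExecution_burstCompute(execution, burst);
    }
    ANeuralNetworksBurst_free(burst);  // freeing a nullptr burst is valid
    return status;
}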
921 
922 int ANeuralNetworksMemoryDesc_create(ANeuralNetworksMemoryDesc** desc) {
923     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_create");
924     if (desc != nullptr) {
925         *desc = nullptr;
926     }
927     if (!desc) {
928         LOG(ERROR) << "ANeuralNetworksMemoryDesc_create passed a nullptr";
929         return ANEURALNETWORKS_UNEXPECTED_NULL;
930     }
931     auto mb = std::make_unique<MemoryBuilder>();
932     *desc = reinterpret_cast<ANeuralNetworksMemoryDesc*>(mb.release());
933     return ANEURALNETWORKS_NO_ERROR;
934 }
935 
936 void ANeuralNetworksMemoryDesc_free(ANeuralNetworksMemoryDesc* desc) {
937     NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksMemoryDesc_free");
938     // No validation.  Free of nullptr is valid.
939     MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
940     delete mb;
941 }
942 
943 int ANeuralNetworksMemoryDesc_addInputRole(ANeuralNetworksMemoryDesc* desc,
944                                            const ANeuralNetworksCompilation* compilation,
945                                            uint32_t index, float frequency) {
946     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_addInputRole");
947     if (!desc || !compilation) {
948         LOG(ERROR) << "ANeuralNetworksMemoryDesc_addInputRole passed a nullptr";
949         return ANEURALNETWORKS_UNEXPECTED_NULL;
950     }
951     MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
952     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
953     return mb->addRole(*c, IOType::INPUT, index, frequency);
954 }
955 
956 int ANeuralNetworksMemoryDesc_addOutputRole(ANeuralNetworksMemoryDesc* desc,
957                                             const ANeuralNetworksCompilation* compilation,
958                                             uint32_t index, float frequency) {
959     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_addOutputRole");
960     if (!desc || !compilation) {
961         LOG(ERROR) << "ANeuralNetworksMemoryDesc_addOutputRole passed a nullptr";
962         return ANEURALNETWORKS_UNEXPECTED_NULL;
963     }
964     MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
965     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
966     return mb->addRole(*c, IOType::OUTPUT, index, frequency);
967 }
968 
969 int ANeuralNetworksMemoryDesc_setDimensions(ANeuralNetworksMemoryDesc* desc, uint32_t rank,
970                                             const uint32_t* dimensions) {
971     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_setDimensions");
972     if (!desc || (!dimensions && rank > 0)) {
973         LOG(ERROR) << "ANeuralNetworksMemoryDesc_setDimensions passed a nullptr";
974         return ANEURALNETWORKS_UNEXPECTED_NULL;
975     }
976     const std::vector<uint32_t> dims(dimensions, dimensions + rank);
977     MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
978     return mb->setDimensions(dims);
979 }
980 
981 int ANeuralNetworksMemoryDesc_finish(ANeuralNetworksMemoryDesc* desc) {
982     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_finish");
983     if (!desc) {
984         LOG(ERROR) << "ANeuralNetworksMemoryDesc_finish passed a nullptr";
985         return ANEURALNETWORKS_UNEXPECTED_NULL;
986     }
987     MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
988     return mb->finish();
989 }
990 
991 int ANeuralNetworksMemory_createFromDesc(const ANeuralNetworksMemoryDesc* desc,
992                                          ANeuralNetworksMemory** memory) {
993     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemory_createFromDesc");
994     if (memory != nullptr) {
995         *memory = nullptr;
996     }
997     if (!desc || !memory) {
998         LOG(ERROR) << "ANeuralNetworksMemory_createFromDesc passed a nullptr";
999         return ANEURALNETWORKS_UNEXPECTED_NULL;
1000     }
1001     const MemoryBuilder* mb = reinterpret_cast<const MemoryBuilder*>(desc);
1002     auto [n, m] = mb->allocate();
1003     if (n != ANEURALNETWORKS_NO_ERROR) {
1004         return n;
1005     }
1006     *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
1007     return ANEURALNETWORKS_NO_ERROR;
1008 }
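
// Usage sketch (illustrative only): the typical device-memory flow. A descriptor
// collects the roles the memory will play before the memory itself is allocated.
// The input index, usage frequency, and dimensions below are assumptions for the
// example.
[[maybe_unused]] static int exampleAllocateDeviceMemory(ANeuralNetworksCompilation* compilation,
                                                        ANeuralNetworksMemory** memory) {
    ANeuralNetworksMemoryDesc* desc = nullptr;
    int status = ANeuralNetworksMemoryDesc_create(&desc);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        // The memory will be used as input 0 of every execution created from `compilation`.
        status = ANeuralNetworksMemoryDesc_addInputRole(desc, compilation, /*index=*/0,
                                                        /*frequency=*/1.0f);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        const uint32_t dimensions[] = {1, 224, 224, 3};  // assumed NHWC shape
        status = ANeuralNetworksMemoryDesc_setDimensions(desc, /*rank=*/4, dimensions);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksMemoryDesc_finish(desc);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksMemory_createFromDesc(desc, memory);
    }
    ANeuralNetworksMemoryDesc_free(desc);  // the descriptor is no longer needed once allocated
    return status;
}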
1009 
1010 int ANeuralNetworksMemory_copy(const ANeuralNetworksMemory* src, const ANeuralNetworksMemory* dst) {
1011     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksMemory_copy");
1012     if (!src || !dst) {
1013         LOG(ERROR) << "ANeuralNetworksMemory_copy passed a nullptr";
1014         return ANEURALNETWORKS_UNEXPECTED_NULL;
1015     }
1016     const RuntimeMemory* s = reinterpret_cast<const RuntimeMemory*>(src);
1017     const RuntimeMemory* d = reinterpret_cast<const RuntimeMemory*>(dst);
1018     return RuntimeMemory::copy(*s, *d);
1019 }
1020 
1021 int ANeuralNetworksMemory_createFromFd(size_t size, int prot, int fd, size_t offset,
1022                                        ANeuralNetworksMemory** memory) {
1023     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksMemory_createFromFd");
1024     *memory = nullptr;  // WARNING: b/138965390
1025     int n = ANEURALNETWORKS_NO_ERROR;
1026     std::unique_ptr<MemoryFd> m;
1027     std::tie(n, m) = MemoryFd::create(size, prot, fd, offset);
1028     if (n != ANEURALNETWORKS_NO_ERROR) {
1029         return n;
1030     }
1031     *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
1032     return ANEURALNETWORKS_NO_ERROR;
1033 }
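
// Usage sketch (illustrative only): wrapping a file descriptor (for example an fd
// to a file of trained weights) as shared memory and using it to back a model
// constant. `operandIndex`, `size`, `offset`, and `prot` (e.g. PROT_READ from the
// caller's setup) are assumptions supplied by the caller, and *outMemory must
// outlive every compilation and execution that still references the model.
[[maybe_unused]] static int exampleSetConstantFromFd(ANeuralNetworksModel* model,
                                                     int32_t operandIndex, int fd, int prot,
                                                     size_t size, size_t offset,
                                                     ANeuralNetworksMemory** outMemory) {
    int status = ANeuralNetworksMemory_createFromFd(size, prot, fd, offset, outMemory);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_setOperandValueFromMemory(model, operandIndex, *outMemory,
                                                                /*offset=*/0, size);
    }
    return status;
}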
1034 
1035 int ANeuralNetworksMemory_createFromAHardwareBuffer(const AHardwareBuffer* ahwb,
1036                                                     ANeuralNetworksMemory** memory) {
1037     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksMemory_createFromAHardwareBuffer");
1038     *memory = nullptr;  // WARNING: b/138965390
1039     int n = ANEURALNETWORKS_NO_ERROR;
1040     std::unique_ptr<MemoryAHWB> m;
1041     std::tie(n, m) = MemoryAHWB::create(*ahwb);
1042     if (n != ANEURALNETWORKS_NO_ERROR) {
1043         return n;
1044     }
1045     *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
1046     return ANEURALNETWORKS_NO_ERROR;
1047 }
1048 
1049 void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory) {
1050     NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksMemory_free");
1051     // No validation.  Free of nullptr is valid.
1052     RuntimeMemory* m = reinterpret_cast<RuntimeMemory*>(memory);
1053     delete m;
1054 }
1055 
1056 int ANeuralNetworksModel_create(ANeuralNetworksModel** model) {
1057     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_create");
1058     initVLogMask();
1059     if (!model) {
1060         LOG(ERROR) << "ANeuralNetworksModel_create passed a nullptr";
1061         return ANEURALNETWORKS_UNEXPECTED_NULL;
1062     }
1063     ModelBuilder* m = new (std::nothrow) ModelBuilder();
1064     if (m == nullptr) {
1065         *model = nullptr;
1066         return ANEURALNETWORKS_OUT_OF_MEMORY;
1067     }
1068     *model = reinterpret_cast<ANeuralNetworksModel*>(m);
1069     return ANEURALNETWORKS_NO_ERROR;
1070 }
1071 
1072 void ANeuralNetworksModel_free(ANeuralNetworksModel* model) {
1073     NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksModel_free");
1074     // No validation.  Free of nullptr is valid.
1075     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1076     delete m;
1077 }
1078 
1079 int ANeuralNetworksModel_finish(ANeuralNetworksModel* model) {
1080     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_finish");
1081     if (!model) {
1082         LOG(ERROR) << "ANeuralNetworksModel_finish passed a nullptr";
1083         return ANEURALNETWORKS_UNEXPECTED_NULL;
1084     }
1085     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1086     return m->finish();
1087 }
1088 
1089 int ANeuralNetworksModel_addOperand(ANeuralNetworksModel* model,
1090                                     const ANeuralNetworksOperandType* type) {
1091     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_addOperand");
1092     if (!model || !type) {
1093         LOG(ERROR) << "ANeuralNetworksModel_addOperand passed a nullptr";
1094         return ANEURALNETWORKS_UNEXPECTED_NULL;
1095     }
1096     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1097     return m->addOperand(*type);
1098 }
1099 
1100 int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel* model, int32_t index,
1101                                          const void* buffer, size_t length) {
1102     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValue");
1103     if (!model || (!buffer && length != 0)) {
1104         LOG(ERROR) << "ANeuralNetworksModel_setOperandValue passed a nullptr";
1105         return ANEURALNETWORKS_UNEXPECTED_NULL;
1106     }
1107     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1108     return m->setOperandValue(index, buffer, length);
1109 }
1110 
1111 int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index,
1112                                                    const ANeuralNetworksMemory* memory,
1113                                                    size_t offset, size_t length) {
1114     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValueFromMemory");
1115     if (!model || !memory) {
1116         LOG(ERROR) << "ANeuralNetworksModel_setOperandValue passed a nullptr";
1117         return ANEURALNETWORKS_UNEXPECTED_NULL;
1118     }
1119     const RuntimeMemory* mem = reinterpret_cast<const RuntimeMemory*>(memory);
1120     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1121     return m->setOperandValueFromMemory(index, mem, offset, length);
1122 }
1123 
1124 int ANeuralNetworksModel_setOperandValueFromModel(ANeuralNetworksModel* model, int32_t index,
1125                                                   const ANeuralNetworksModel* value) {
1126     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValueFromModel");
1127     if (!model || !value) {
1128         LOG(ERROR) << "ANeuralNetworksModel_setOperandValueFromModel passed a nullptr";
1129         return ANEURALNETWORKS_UNEXPECTED_NULL;
1130     }
1131     const ModelBuilder* val = reinterpret_cast<const ModelBuilder*>(value);
1132     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1133     return m->setOperandValueFromModel(index, val);
1134 }
1135 
1136 int ANeuralNetworksModel_addOperation(ANeuralNetworksModel* model,
1137                                       ANeuralNetworksOperationType type, uint32_t inputCount,
1138                                       const uint32_t* inputs, uint32_t outputCount,
1139                                       const uint32_t* outputs) {
1140     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_addOperation");
1141     if (!model || !inputs || !outputs) {
1142         LOG(ERROR) << "ANeuralNetworksModel_addOperation passed a nullptr";
1143         return ANEURALNETWORKS_UNEXPECTED_NULL;
1144     }
1145     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1146     return m->addOperation(type, inputCount, inputs, outputCount, outputs);
1147 }
1148 
1149 int ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(
1150         ANeuralNetworksModel* model, int32_t index,
1151         const ANeuralNetworksSymmPerChannelQuantParams* channelQuant) {
1152     NNTRACE_RT(NNTRACE_PHASE_PREPARATION,
1153                "ANeuralNetworksModel_setOperandSymmPerChannelQuantParams");
1154     if (!model || !channelQuant) {
1155         LOG(ERROR) << "ANeuralNetworksModel_setOperandSymmPerChannelQuantParams passed a nullptr";
1156         return ANEURALNETWORKS_UNEXPECTED_NULL;
1157     }
1158     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1159     return m->setOperandSymmPerChannelQuantParams(index, *channelQuant);
1160 }
1161 
1162 int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel* model, uint32_t inputCount,
1163                                                   const uint32_t* inputs, uint32_t outputCount,
1164                                                   const uint32_t* outputs) {
1165     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_identifyInputsAndOutputs");
1166     if (!model || !inputs || !outputs) {
1167         LOG(ERROR) << ("ANeuralNetworksModel_identifyInputsAndOutputs passed a nullptr");
1168         return ANEURALNETWORKS_UNEXPECTED_NULL;
1169     }
1170     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1171     return m->identifyInputsAndOutputs(inputCount, inputs, outputCount, outputs);
1172 }
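
// Usage sketch (illustrative only): building and finishing a minimal model with a
// single ADD operation (out = a + b, no fused activation). Operand indices follow
// the order in which operands are added; the 2x2 float32 shape is an assumption
// for the example.
[[maybe_unused]] static int exampleBuildAddModel(ANeuralNetworksModel** outModel) {
    *outModel = nullptr;
    ANeuralNetworksModel* model = nullptr;
    int status = ANeuralNetworksModel_create(&model);
    if (status != ANEURALNETWORKS_NO_ERROR) {
        return status;
    }
    const uint32_t dims[] = {2, 2};
    const ANeuralNetworksOperandType tensorType = {
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 2, .dimensions = dims};
    const ANeuralNetworksOperandType activationType = {.type = ANEURALNETWORKS_INT32};
    status = ANeuralNetworksModel_addOperand(model, &tensorType);  // operand 0: a
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_addOperand(model, &tensorType);  // operand 1: b
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_addOperand(model, &activationType);  // operand 2: fuse code
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_addOperand(model, &tensorType);  // operand 3: out
    }
    const int32_t fuseCode = ANEURALNETWORKS_FUSED_NONE;
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_setOperandValue(model, 2, &fuseCode, sizeof(fuseCode));
    }
    const uint32_t addInputs[] = {0, 1, 2};
    const uint32_t addOutputs[] = {3};
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, addInputs, 1,
                                                   addOutputs);
    }
    const uint32_t modelInputs[] = {0, 1};
    const uint32_t modelOutputs[] = {3};
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_identifyInputsAndOutputs(model, 2, modelInputs, 1,
                                                               modelOutputs);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksModel_finish(model);
    }
    if (status != ANEURALNETWORKS_NO_ERROR) {
        ANeuralNetworksModel_free(model);
        return status;
    }
    *outModel = model;
    return ANEURALNETWORKS_NO_ERROR;
}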
1173 
1174 int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel* model, bool allow) {
1175     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_relaxComputationFloat32toFloat16");
1176     if (!model) {
1177         LOG(ERROR) << ("ANeuralNetworksModel_relaxComputationFloat32toFloat16 passed a nullptr");
1178         return ANEURALNETWORKS_UNEXPECTED_NULL;
1179     }
1180     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1181     return m->relaxComputationFloat32toFloat16(allow);
1182 }
1183 
1184 int ANeuralNetworksCompilation_create(ANeuralNetworksModel* model,
1185                                       ANeuralNetworksCompilation** compilation) {
1186     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_create");
1187     if (!model || !compilation) {
1188         LOG(ERROR) << "ANeuralNetworksCompilation_create passed a nullptr";
1189         return ANEURALNETWORKS_UNEXPECTED_NULL;
1190     }
1191 
1192     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1193     CompilationBuilder* c = nullptr;
1194 
1195     const auto& drivers = DeviceManager::get()->getDrivers();
1196     std::vector<std::shared_ptr<Device>> nonUpdatableDrivers;
1197     nonUpdatableDrivers.reserve(drivers.size());
1198     std::copy_if(drivers.begin(), drivers.end(), std::back_inserter(nonUpdatableDrivers),
1199                  [](const auto& driver) { return !driver->isUpdatable(); });
1200 
1201     int result = m->createCompilation(&c, nonUpdatableDrivers);
1202     *compilation = reinterpret_cast<ANeuralNetworksCompilation*>(c);
1203     return result;
1204 }
1205 
1206 void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation* compilation) {
1207     NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksCompilation_free");
1208     // No validation.  Free of nullptr is valid.
1209     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1210     delete c;
1211 }
1212 
1213 int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation* compilation,
1214                                              int32_t preference) {
1215     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setPreference");
1216     if (!compilation) {
1217         LOG(ERROR) << "ANeuralNetworksCompilation_setPreference passed a nullptr";
1218         return ANEURALNETWORKS_UNEXPECTED_NULL;
1219     }
1220     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1221     return c->setPreference(preference);
1222 }
1223 
1224 int ANeuralNetworksCompilation_setCaching(ANeuralNetworksCompilation* compilation,
1225                                           const char* cacheDir, const uint8_t* token) {
1226     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setCaching");
1227     if (!compilation || !cacheDir || !token) {
1228         LOG(ERROR) << "ANeuralNetworksCompilation_setCaching passed a nullptr";
1229         return ANEURALNETWORKS_UNEXPECTED_NULL;
1230     }
1231     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1232     return c->setCaching(cacheDir, token);
1233 }
1234 
1235 int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation* compilation) {
1236     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_finish");
1237     if (!compilation) {
1238         LOG(ERROR) << "ANeuralNetworksCompilation_finish passed a nullptr";
1239         return ANEURALNETWORKS_UNEXPECTED_NULL;
1240     }
1241     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1242     return c->finish();
1243 }
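
// Usage sketch (illustrative only): a typical compilation setup combining an
// execution preference with compilation caching. `cacheDir` is assumed to be an
// app-private, writable directory and `token` to hold
// ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN bytes that uniquely identify the model.
[[maybe_unused]] static int exampleConfigureAndFinishCompilation(
        ANeuralNetworksCompilation* compilation, const char* cacheDir, const uint8_t* token) {
    int status = ANeuralNetworksCompilation_setPreference(compilation,
                                                          ANEURALNETWORKS_PREFER_SUSTAINED_SPEED);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksCompilation_setCaching(compilation, cacheDir, token);
    }
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksCompilation_finish(compilation);
    }
    return status;
}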
1244 
1245 int ANeuralNetworksCompilation_setPriority(ANeuralNetworksCompilation* compilation, int priority) {
1246     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setPriority");
1247     if (!compilation) {
1248         LOG(ERROR) << "ANeuralNetworksCompilation_setPriority passed a nullptr";
1249         return ANEURALNETWORKS_UNEXPECTED_NULL;
1250     }
1251     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1252     return c->setPriority(priority);
1253 }
1254 
1255 int ANeuralNetworksCompilation_setTimeout(ANeuralNetworksCompilation* compilation,
1256                                           uint64_t duration) {
1257     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setTimeout");
1258     if (!compilation) {
1259         LOG(ERROR) << "ANeuralNetworksCompilation_setTimeout passed a nullptr";
1260         return ANEURALNETWORKS_UNEXPECTED_NULL;
1261     }
1262     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1263     return c->setTimeoutDuration(duration);
1264 }
1265 
1266 int ANeuralNetworksExecution_create(ANeuralNetworksCompilation* compilation,
1267                                     ANeuralNetworksExecution** execution) {
1268     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_create");
1269     if (!compilation || !execution) {
1270         LOG(ERROR) << "ANeuralNetworksExecution_create passed a nullptr";
1271         return ANEURALNETWORKS_UNEXPECTED_NULL;
1272     }
1273 
1274     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1275     ExecutionBuilder* r = nullptr;
1276     int result = c->createExecution(&r);
1277     *execution = reinterpret_cast<ANeuralNetworksExecution*>(r);
1278     return result;
1279 }
1280 
1281 void ANeuralNetworksExecution_free(ANeuralNetworksExecution* execution) {
1282     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_free");
1283     // Free of nullptr is valid.
1284     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1285     if (r && r->inFlight()) {
1286         LOG(ERROR) << "ANeuralNetworksExecution_free passed an in-flight ANeuralNetworksExecution"
1287                    << " and is therefore ignored";
1288         return;
1289     }
1290     delete r;
1291 }
1292 
1293 int ANeuralNetworksExecution_getOutputOperandRank(ANeuralNetworksExecution* execution,
1294                                                   int32_t index, uint32_t* rank) {
1295     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getOutputOperandRank");
1296     if (!execution || !rank) {
1297         LOG(ERROR) << "ANeuralNetworksExecution_getOutputOperandRank passed a nullptr";
1298         return ANEURALNETWORKS_UNEXPECTED_NULL;
1299     }
1300     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1301     return r->getOutputOperandRank(index, rank);
1302 }
1303 
1304 int ANeuralNetworksExecution_getOutputOperandDimensions(ANeuralNetworksExecution* execution,
1305                                                         int32_t index, uint32_t* dimensions) {
1306     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getOutputOperandDimensions");
1307     if (!execution || !dimensions) {
1308         LOG(ERROR) << "ANeuralNetworksExecution_getOutputOperandDimensions passed a nullptr";
1309         return ANEURALNETWORKS_UNEXPECTED_NULL;
1310     }
1311     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1312     return r->getOutputOperandDimensions(index, dimensions);
1313 }
1314 
1315 int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution* execution, int32_t index,
1316                                       const ANeuralNetworksOperandType* type, const void* buffer,
1317                                       size_t length) {
1318     NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setInput");
1319     if (!execution || (!buffer && length != 0)) {
1320         LOG(ERROR) << "ANeuralNetworksExecution_setInput passed a nullptr";
1321         return ANEURALNETWORKS_UNEXPECTED_NULL;
1322     }
1323     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1324     return r->setInput(index, type, buffer, length);
1325 }
1326 
1327 int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
1328                                                 const ANeuralNetworksOperandType* type,
1329                                                 const ANeuralNetworksMemory* memory, size_t offset,
1330                                                 size_t length) {
1331     NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setInputFromMemory");
1332     if (!execution || !memory) {
1333         LOG(ERROR) << "ANeuralNetworksExecution_setInputFromMemory passed a nullptr";
1334         return ANEURALNETWORKS_UNEXPECTED_NULL;
1335     }
1336 
1337     const RuntimeMemory* m = reinterpret_cast<const RuntimeMemory*>(memory);
1338     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1339     return r->setInputFromMemory(index, type, m, offset, length);
1340 }
1341 
1342 int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution* execution, int32_t index,
1343                                        const ANeuralNetworksOperandType* type, void* buffer,
1344                                        size_t length) {
1345     NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setOutput");
1346     if (!execution || (!buffer && length != 0)) {
1347         LOG(ERROR) << "ANeuralNetworksExecution_setOutput passed a nullptr";
1348         return ANEURALNETWORKS_UNEXPECTED_NULL;
1349     }
1350     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1351     return r->setOutput(index, type, buffer, length);
1352 }
1353 
1354 int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
1355                                                  const ANeuralNetworksOperandType* type,
1356                                                  const ANeuralNetworksMemory* memory, size_t offset,
1357                                                  size_t length) {
1358     NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setOutputFromMemory");
1359     if (!execution || !memory) {
1360         LOG(ERROR) << "ANeuralNetworksExecution_setOutputFromMemory passed a nullptr";
1361         return ANEURALNETWORKS_UNEXPECTED_NULL;
1362     }
1363 
1364     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1365     const RuntimeMemory* m = reinterpret_cast<const RuntimeMemory*>(memory);
1366     return r->setOutputFromMemory(index, type, m, offset, length);
1367 }
1368 
1369 int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution* execution,
1370                                           ANeuralNetworksEvent** event) {
1371     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_startCompute");
1372     if (!event) {
1373         LOG(ERROR) << "ANeuralNetworksExecution_startCompute passed a nullptr";
1374         return ANEURALNETWORKS_UNEXPECTED_NULL;
1375     }
1376     if (!execution) {
1377         LOG(ERROR) << "ANeuralNetworksExecution_startCompute passed a nullptr";
1378         *event = nullptr;
1379         return ANEURALNETWORKS_UNEXPECTED_NULL;
1380     }
1381     // TODO validate the rest
1382 
1383     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1384 
1385     std::shared_ptr<ExecutionCallback> callback;
1386     *event = nullptr;
1387 
1388     int n = r->computeAsynchronously(&callback);
1389     if (n != ANEURALNETWORKS_NO_ERROR) {
1390         return n;
1391     }
1392     auto e = std::make_unique<CallbackEvent>(std::move(callback));
1393     *event = reinterpret_cast<ANeuralNetworksEvent*>(e.release());
1394     return ANEURALNETWORKS_NO_ERROR;
1395 }
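
// Usage sketch (illustrative only): the asynchronous path. The execution is
// assumed to be fully configured; the caller blocks on the returned event before
// reading the output buffers.
[[maybe_unused]] static int exampleComputeAsynchronously(ANeuralNetworksExecution* execution) {
    ANeuralNetworksEvent* event = nullptr;
    int status = ANeuralNetworksExecution_startCompute(execution, &event);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        // Outputs are only valid once the event signals successful completion.
        status = ANeuralNetworksEvent_wait(event);
    }
    ANeuralNetworksEvent_free(event);  // freeing a nullptr event is valid
    return status;
}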
1396 
1397 int ANeuralNetworksExecution_setTimeout(ANeuralNetworksExecution* execution, uint64_t duration) {
1398     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setTimeout");
1399     if (!execution) {
1400         LOG(ERROR) << "ANeuralNetworksExecution_setTimeout passed a nullptr";
1401         return ANEURALNETWORKS_UNEXPECTED_NULL;
1402     }
1403 
1404     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1405     return r->setTimeoutDuration(duration);
1406 }
1407 
1408 int ANeuralNetworksEvent_wait(ANeuralNetworksEvent* event) {
1409     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksEvent_wait");
1410     if (event == nullptr) {
1411         LOG(ERROR) << "ANeuralNetworksEvent_wait passed a nullptr";
1412         return ANEURALNETWORKS_UNEXPECTED_NULL;
1413     }
1414 
1415     IEvent* e = reinterpret_cast<IEvent*>(event);
1416     return convertErrorStatusToResultCode(e->wait());
1417 }
1418 
1419 void ANeuralNetworksEvent_free(ANeuralNetworksEvent* event) {
1420     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksEvent_free");
1421     // No validation.  Free of nullptr is valid.
1422     if (event) {
1423         IEvent* e = reinterpret_cast<IEvent*>(event);
1424         e->wait();
1425         delete e;
1426     }
1427 }
1428 
1429 int ANeuralNetworksExecution_setLoopTimeout(ANeuralNetworksExecution* execution,
1430                                             uint64_t duration) {
1431     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setLoopTimeout");
1432     if (!execution) {
1433         LOG(ERROR) << "ANeuralNetworksExecution_setLoopTimeout passed a nullptr";
1434         return ANEURALNETWORKS_UNEXPECTED_NULL;
1435     }
1436 
1437     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1438     return r->setLoopTimeout(duration);
1439 }
1440 
1441 uint64_t ANeuralNetworks_getDefaultLoopTimeout() {
1442     return operation_while::kTimeoutNsDefault;
1443 }
1444 
1445 uint64_t ANeuralNetworks_getMaximumLoopTimeout() {
1446     return operation_while::kTimeoutNsMaximum;
1447 }
1448 
1449 int ANeuralNetworksDevice_getExtensionSupport(const ANeuralNetworksDevice* device,
1450                                               const char* extensionName,
1451                                               bool* isExtensionSupported) {
1452     if (device == nullptr || extensionName == nullptr || isExtensionSupported == nullptr) {
1453         LOG(ERROR) << "ANeuralNetworksDevice_getExtensionSupport passed a nullptr";
1454         return ANEURALNETWORKS_UNEXPECTED_NULL;
1455     }
1456 
1457     const Device* d = reinterpret_cast<const Device*>(device);
1458     const auto& supportedExtensions = d->getSupportedExtensions();
1459     *isExtensionSupported = std::any_of(supportedExtensions.begin(), supportedExtensions.end(),
1460                                         [extensionName](const auto& supportedExtension) {
1461                                             return supportedExtension.name == extensionName;
1462                                         });
1463 
1464     return ANEURALNETWORKS_NO_ERROR;
1465 }
1466 
1467 int ANeuralNetworksModel_getExtensionOperandType(ANeuralNetworksModel* model,
1468                                                  const char* extensionName,
1469                                                  uint16_t operandCodeWithinExtension,
1470                                                  int32_t* type) {
1471     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_getExtensionOperandType");
1472     if (!model || !extensionName || !type) {
1473         LOG(ERROR) << "ANeuralNetworksModel_getExtensionOperandType passed a nullptr";
1474         return ANEURALNETWORKS_UNEXPECTED_NULL;
1475     }
1476     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1477     return m->getExtensionType(extensionName, operandCodeWithinExtension, type);
1478 }
1479 
1480 int ANeuralNetworksModel_getExtensionOperationType(ANeuralNetworksModel* model,
1481                                                    const char* extensionName,
1482                                                    uint16_t operationCodeWithinExtension,
1483                                                    ANeuralNetworksOperationType* type) {
1484     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_getExtensionOperationType");
1485     if (!model || !extensionName || !type) {
1486         LOG(ERROR) << "ANeuralNetworksModel_getExtensionOperationType passed a nullptr";
1487         return ANEURALNETWORKS_UNEXPECTED_NULL;
1488     }
1489     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1490     return m->getExtensionType(extensionName, operationCodeWithinExtension, type);
1491 }
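
// Usage sketch (illustrative only): gating use of a vendor extension on device
// support before resolving an extension operation type. The extension name and the
// operation code within it are hypothetical placeholders.
[[maybe_unused]] static int exampleResolveExtensionOperation(const ANeuralNetworksDevice* device,
                                                             ANeuralNetworksModel* model,
                                                             ANeuralNetworksOperationType* outType) {
    const char* kExtensionName = "com.example.my_extension";  // hypothetical extension name
    bool isSupported = false;
    int status = ANeuralNetworksDevice_getExtensionSupport(device, kExtensionName, &isSupported);
    if (status != ANEURALNETWORKS_NO_ERROR) {
        return status;
    }
    if (!isSupported) {
        // The caller should fall back to a path that does not use the extension.
        return ANEURALNETWORKS_BAD_DATA;
    }
    return ANeuralNetworksModel_getExtensionOperationType(model, kExtensionName,
                                                          /*operationCodeWithinExtension=*/0,
                                                          outType);
}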
1492 
1493 int ANeuralNetworksModel_setOperandExtensionData(ANeuralNetworksModel* model, int32_t index,
1494                                                  const void* data, size_t length) {
1495     NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandExtensionData");
1496     if (!model || (!data && length != 0)) {
1497         LOG(ERROR) << "ANeuralNetworksModel_setOperandExtensionData passed a nullptr";
1498         return ANEURALNETWORKS_UNEXPECTED_NULL;
1499     }
1500     ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
1501     return m->setOperandExtensionData(index, data, length);
1502 }
1503 
1504 int ANeuralNetworksEvent_createFromSyncFenceFd(int syncFenceFd, ANeuralNetworksEvent** event) {
1505     if (event == nullptr) {
1506         LOG(ERROR) << "ANeuralNetworksEvent_createFromSyncFenceFd passed a nullptr";
1507         return ANEURALNETWORKS_UNEXPECTED_NULL;
1508     }
1509     if (syncFenceFd <= 0) {
1510         LOG(ERROR) << "ANeuralNetworksEvent_createFromSyncFenceFd passed an invalid fd: "
1511                    << syncFenceFd;
1512         *event = nullptr;
1513         return ANEURALNETWORKS_BAD_DATA;
1514     }
1515     std::unique_ptr<SyncFenceEvent> e =
1516             std::make_unique<SyncFenceEvent>(syncFenceFd, nullptr, nullptr);
1517     *event = reinterpret_cast<ANeuralNetworksEvent*>(e.release());
1518     return ANEURALNETWORKS_NO_ERROR;
1519 }
1520 
1521 int ANeuralNetworksEvent_getSyncFenceFd(const ANeuralNetworksEvent* event, int* syncFenceFd) {
1522     if (syncFenceFd == nullptr) {
1523         LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd passed a nullptr";
1524         return ANEURALNETWORKS_UNEXPECTED_NULL;
1525     }
1526     *syncFenceFd = -1;
1527     if (event == nullptr) {
1528         LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd passed a nullptr";
1529         return ANEURALNETWORKS_UNEXPECTED_NULL;
1530     }
1531     const IEvent* e = reinterpret_cast<const IEvent*>(event);
1532     // The client owns the dupped fd, and is responsible for closing it.
1533     *syncFenceFd = e->getSyncFenceFd(/*shouldDup*/ true);
1534     if (*syncFenceFd <= 0) {
1535         LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd unable to get valid sync_fence fd";
1536         *syncFenceFd = -1;
1537         return ANEURALNETWORKS_BAD_DATA;
1538     }
1539     return ANEURALNETWORKS_NO_ERROR;
1540 }
1541 
1542 int ANeuralNetworksExecution_startComputeWithDependencies(
1543         ANeuralNetworksExecution* execution, const ANeuralNetworksEvent* const* dependencies,
1544         uint32_t numOfDependencies, uint64_t duration, ANeuralNetworksEvent** event) {
1545     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_startComputeWithDependencies");
1546     if (!event) {
1547         LOG(ERROR) << "ANeuralNetworksExecution_startComputeWithDependencies passed a nullptr";
1548         return ANEURALNETWORKS_UNEXPECTED_NULL;
1549     }
1550     if ((!dependencies && numOfDependencies != 0) || !execution) {
1551         LOG(ERROR) << "ANeuralNetworksExecution_startComputeWithDependencies passed a nullptr";
1552         *event = nullptr;
1553         return ANEURALNETWORKS_UNEXPECTED_NULL;
1554     }
1555     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1556 
1557     std::vector<int> waitForList;
1558     for (uint32_t i = 0; i < numOfDependencies; i++) {
1559         if (!dependencies[i]) {
1560             LOG(ERROR) << "ANeuralNetworksExecution_startComputeWithDependencies passed a nullptr";
1561             *event = nullptr;
1562             return ANEURALNETWORKS_UNEXPECTED_NULL;
1563         }
1564         const IEvent* e = reinterpret_cast<const IEvent*>(dependencies[i]);
1565         int syncFenceFd = e->getSyncFenceFd(/*should_dup*/ false);
1566         if (syncFenceFd < 0) {
1567             e->wait();
1568         } else {
1569             waitForList.push_back(syncFenceFd);
1570         }
1571     }
1572 
1573     if (r->getCompilation()->hasDynamicTemporaries()) {
1574         // The current implementation of fenced execution does not support
1575         // dynamic temporaries.  Fall back to non-fenced execution.
1576         LOG(INFO) << "ANeuralNetworksExecution_startComputeWithDependencies falling back"
1577                   << " to ANeuralNetworksExecution_startCompute"
1578                   << " because of boundary operands of unknown size";
1579         for (int syncFenceFd : waitForList) {
1580             if (syncFenceFd > 0) {
1581                 auto w = syncWait(syncFenceFd, -1);
1582                 if (w != FenceState::SIGNALED) {
1583                     VLOG(EXECUTION) << "syncWait failed, fd: " << syncFenceFd;
1584                     *event = nullptr;
1585                     return ANEURALNETWORKS_OP_FAILED;
1586                 }
1587             }
1588         }
1589         return ANeuralNetworksExecution_startCompute(execution, event);
1590     }
1591 
1592     int syncFenceToSignal = -1;
1593     int n = r->computeFenced(waitForList, duration, &syncFenceToSignal);
1594     std::unique_ptr<SyncFenceEvent> e = std::make_unique<SyncFenceEvent>(
1595             syncFenceToSignal, r->getExecuteFencedInfoCallback(),
1596             // TODO(miaowang): support dynamic output shape only with memory domain.
1597             // For now just return empty output shapes.
1598             [r](ErrorStatus status) { return r->finishComputation(status, {}); });
1599     close(syncFenceToSignal);
1600     if (n != ANEURALNETWORKS_NO_ERROR) {
1601         *event = nullptr;
1602     } else {
1603         *event = reinterpret_cast<ANeuralNetworksEvent*>(e.release());
1604     }
1605     return n;
1606 }
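
// Usage sketch (illustrative only): fenced execution chained behind a sync fence
// produced by an earlier pipeline stage (for example a GPU pass). The execution is
// assumed to be fully configured; a duration of 0 means no execution timeout.
[[maybe_unused]] static int exampleComputeAfterFence(ANeuralNetworksExecution* execution,
                                                     int inputReadySyncFenceFd) {
    ANeuralNetworksEvent* dependency = nullptr;
    int status = ANeuralNetworksEvent_createFromSyncFenceFd(inputReadySyncFenceFd, &dependency);
    if (status != ANEURALNETWORKS_NO_ERROR) {
        return status;
    }
    const ANeuralNetworksEvent* dependencies[] = {dependency};
    ANeuralNetworksEvent* finished = nullptr;
    status = ANeuralNetworksExecution_startComputeWithDependencies(
            execution, dependencies, /*numOfDependencies=*/1, /*duration=*/0, &finished);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksEvent_wait(finished);
    }
    ANeuralNetworksEvent_free(finished);
    ANeuralNetworksEvent_free(dependency);
    return status;
}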
1607 
1608 int64_t ANeuralNetworks_getRuntimeFeatureLevel() {
1609     return kCurrentNNAPIRuntimeFeatureLevel;
1610 }
1611 
1612 int ANeuralNetworksExecution_enableInputAndOutputPadding(ANeuralNetworksExecution* execution,
1613                                                          bool enable) {
1614     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_enableInputAndOutputPadding");
1615     if (!execution) {
1616         LOG(ERROR) << "ANeuralNetworksExecution_enableInputAndOutputPadding passed a nullptr";
1617         return ANEURALNETWORKS_UNEXPECTED_NULL;
1618     }
1619     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1620     return r->enableInputAndOutputPadding(enable);
1621 }
1622 
1623 int ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
1624         const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* alignment) {
1625     NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
1626                "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput");
1627     if (!compilation || !alignment) {
1628         LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput passed a "
1629                       "nullptr";
1630         return ANEURALNETWORKS_UNEXPECTED_NULL;
1631     }
1632     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
1633     return c->getPreferredMemoryAlignmentForInput(index, alignment);
1634 }
1635 
1636 int ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(
1637         const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* padding) {
1638     NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
1639                "ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput");
1640     if (!compilation || !padding) {
1641         LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput passed a "
1642                       "nullptr";
1643         return ANEURALNETWORKS_UNEXPECTED_NULL;
1644     }
1645     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
1646     return c->getPreferredMemoryPaddingForInput(index, padding);
1647 }
1648 
1649 int ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(
1650         const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* alignment) {
1651     NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
1652                "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput");
1653     if (!compilation || !alignment) {
1654         LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput passed a "
1655                       "nullptr";
1656         return ANEURALNETWORKS_UNEXPECTED_NULL;
1657     }
1658     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
1659     return c->getPreferredMemoryAlignmentForOutput(index, alignment);
1660 }
1661 
1662 int ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(
1663         const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* padding) {
1664     NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
1665                "ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput");
1666     if (!compilation || !padding) {
1667         LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput passed a "
1668                       "nullptr";
1669         return ANEURALNETWORKS_UNEXPECTED_NULL;
1670     }
1671     const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
1672     return c->getPreferredMemoryPaddingForOutput(index, padding);
1673 }
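
// Usage sketch (illustrative only): querying the preferred alignment and padding
// for one input. The values are hints for sizing and aligning client buffers; the
// padding can only be exploited by executions that have enabled input/output
// padding via ANeuralNetworksExecution_enableInputAndOutputPadding.
[[maybe_unused]] static int exampleQueryInputMemoryPreferences(
        const ANeuralNetworksCompilation* compilation, uint32_t inputIndex, uint32_t* alignment,
        uint32_t* padding) {
    int status = ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(compilation,
                                                                                inputIndex,
                                                                                alignment);
    if (status == ANEURALNETWORKS_NO_ERROR) {
        status = ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(compilation,
                                                                              inputIndex, padding);
    }
    return status;
}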
1674 
1675 int ANeuralNetworksExecution_setReusable(ANeuralNetworksExecution* execution, bool reusable) {
1676     NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setReusable");
1677     if (!execution) {
1678         LOG(ERROR) << "ANeuralNetworksExecution_setReusable passed a nullptr";
1679         return ANEURALNETWORKS_UNEXPECTED_NULL;
1680     }
1681     ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
1682     return r->setReusable(reusable);
1683 }
1684 
1685 #ifdef NN_COMPATIBILITY_LIBRARY_BUILD
1686 
1687 int SL_ANeuralNetworksCompilation_setCachingFromFds(ANeuralNetworksCompilation* compilation,
1688                                                     const int* modelCacheFds,
1689                                                     const uint32_t numModelCacheFiles,
1690                                                     const int* dataCacheFds,
1691                                                     const uint32_t numDataCacheFiles,
1692                                                     const uint8_t* token) {
1693     NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "SL_ANeuralNetworksCompilation_setCachingFromFds");
1694     if (!compilation || (numModelCacheFiles != 0 && !modelCacheFds) ||
1695         (numDataCacheFiles != 0 && !dataCacheFds) || !token) {
1696         LOG(ERROR) << "SL_ANeuralNetworksCompilation_setCachingFromFds passed a nullptr";
1697         return ANEURALNETWORKS_UNEXPECTED_NULL;
1698     }
1699     CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
1700     return c->setCachingFromFds(modelCacheFds, numModelCacheFiles, dataCacheFds, numDataCacheFiles,
1701                                 token);
1702 }
1703 
1704 int SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded(const ANeuralNetworksDevice* device,
1705                                                          uint32_t* numModelCacheFiles,
1706                                                          uint32_t* numDataCacheFiles) {
1707     if (numModelCacheFiles) *numModelCacheFiles = 0;
1708     if (numDataCacheFiles) *numDataCacheFiles = 0;
1709 
1710     if (device == nullptr || numModelCacheFiles == nullptr || numDataCacheFiles == nullptr) {
1711         LOG(ERROR) << "SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded passed a nullptr";
1712         return ANEURALNETWORKS_UNEXPECTED_NULL;
1713     }
1714 
1715     const Device* d = reinterpret_cast<const Device*>(device);
1716     std::tie(*numModelCacheFiles, *numDataCacheFiles) = d->getNumberOfCacheFilesNeeded();
1717     return ANEURALNETWORKS_NO_ERROR;
1718 }
1719 
1720 int SL_ANeuralNetworksDevice_getPerformanceInfo(
1721         const ANeuralNetworksDevice* device, int32_t performanceInfoKind,
1722         SL_ANeuralNetworksPerformanceInfo* performanceInfo) {
1723     if (performanceInfo) *performanceInfo = {.execTime = 0.0f, .powerUsage = 0.0f};
1724 
1725     if (device == nullptr || performanceInfo == nullptr) {
1726         LOG(ERROR) << "SL_ANeuralNetworksDevice_getPerformanceInfo passed a nullptr";
1727         return ANEURALNETWORKS_UNEXPECTED_NULL;
1728     }
1729 
1730     constexpr auto conv = [](const Capabilities::PerformanceInfo& info) {
1731         return SL_ANeuralNetworksPerformanceInfo{.execTime = info.execTime,
1732                                                  .powerUsage = info.powerUsage};
1733     };
1734 
1735     const Device* d = reinterpret_cast<const Device*>(device);
1736     const Capabilities& capabilities = d->getCapabilities();
1737 
1738     switch (performanceInfoKind) {
1739         case SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR:
1740             *performanceInfo = conv(capabilities.relaxedFloat32toFloat16PerformanceScalar);
1741             return ANEURALNETWORKS_NO_ERROR;
1742         case SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR:
1743             *performanceInfo = conv(capabilities.relaxedFloat32toFloat16PerformanceTensor);
1744             return ANEURALNETWORKS_NO_ERROR;
1745         case SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_IF:
1746             *performanceInfo = conv(capabilities.ifPerformance);
1747             return ANEURALNETWORKS_NO_ERROR;
1748         case SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_WHILE:
1749             *performanceInfo = conv(capabilities.whilePerformance);
1750             return ANEURALNETWORKS_NO_ERROR;
1751     }
1752 
1753     LOG(ERROR) << "SL_ANeuralNetworksDevice_getPerformanceInfo passed unknown performanceInfoKind "
1754                << performanceInfoKind;
1755     return ANEURALNETWORKS_BAD_DATA;
1756 }
1757 
1758 int SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo(
1759         const ANeuralNetworksDevice* device, void* context,
1760         void (*callback)(SL_ANeuralNetworksOperandPerformanceInfo, void*)) {
1761     if (device == nullptr || context == nullptr || callback == nullptr) {
1762         LOG(ERROR) << "SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo passed a nullptr";
1763         return ANEURALNETWORKS_UNEXPECTED_NULL;
1764     }
1765 
1766     constexpr auto conv = [](const Capabilities::OperandPerformance& operandPerformance) {
1767         return SL_ANeuralNetworksOperandPerformanceInfo{
1768                 .operandType = static_cast<int32_t>(operandPerformance.type),
1769                 .performanceInfo = {.execTime = operandPerformance.info.execTime,
1770                                     .powerUsage = operandPerformance.info.powerUsage},
1771         };
1772     };
1773 
1774     const Device* d = reinterpret_cast<const Device*>(device);
1775     const Capabilities& capabilities = d->getCapabilities();
1776 
1777     for (const auto& operandPerformance : capabilities.operandPerformance.asVector()) {
1778         const SL_ANeuralNetworksOperandPerformanceInfo opPerf = conv(operandPerformance);
1779         callback(opPerf, context);
1780     }
1781     return ANEURALNETWORKS_NO_ERROR;
1782 }
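
// Usage sketch (illustrative only): a callback suitable for
// SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo, collecting every entry
// into a vector supplied through the context pointer.
[[maybe_unused]] static void exampleCollectOperandPerformance(
        SL_ANeuralNetworksOperandPerformanceInfo info, void* context) {
    auto* collected = static_cast<std::vector<SL_ANeuralNetworksOperandPerformanceInfo>*>(context);
    collected->push_back(info);
}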
1783 
1784 int SL_ANeuralNetworksDevice_getVendorExtensionCount(const ANeuralNetworksDevice* device,
1785                                                      uint32_t* vendorExtensionCount) {
1786     if (vendorExtensionCount) *vendorExtensionCount = 0;
1787 
1788     if (device == nullptr || vendorExtensionCount == nullptr) {
1789         LOG(ERROR) << "SL_ANeuralNetworksDevice_getVendorExtensionCount passed a nullptr";
1790         return ANEURALNETWORKS_UNEXPECTED_NULL;
1791     }
1792 
1793     const Device* d = reinterpret_cast<const Device*>(device);
1794     *vendorExtensionCount = d->getSupportedExtensions().size();
1795     return ANEURALNETWORKS_NO_ERROR;
1796 }
1797 
1798 int SL_ANeuralNetworksDevice_getVendorExtensionName(const ANeuralNetworksDevice* device,
1799                                                     uint32_t vendorExtensionIndex,
1800                                                     const char** extensionName) {
1801     if (extensionName) *extensionName = nullptr;
1802 
1803     if (device == nullptr || extensionName == nullptr) {
1804         LOG(ERROR) << "SL_ANeuralNetworksDevice_getVendorExtensionName passed a nullptr";
1805         return ANEURALNETWORKS_UNEXPECTED_NULL;
1806     }
1807 
1808     const Device* d = reinterpret_cast<const Device*>(device);
1809     const auto& extensions = d->getSupportedExtensions();
1810 
1811     if (vendorExtensionIndex >= extensions.size()) {
1812         LOG(ERROR)
1813                 << "SL_ANeuralNetworksDevice_getVendorExtensionName passed a vendorExtensionIndex "
1814                    "that is out of range";
1815         return ANEURALNETWORKS_BAD_DATA;
1816     }
1817     const auto& extension = extensions[vendorExtensionIndex];
1818 
1819     *extensionName = extension.name.c_str();
1820     return ANEURALNETWORKS_NO_ERROR;
1821 }
1822 
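// Illustrative usage (not part of this file): the two calls above are intended to be used
// together, first to size the enumeration and then to read each extension name, e.g.:
//
//   uint32_t count = 0;
//   if (SL_ANeuralNetworksDevice_getVendorExtensionCount(device, &count) ==
//       ANEURALNETWORKS_NO_ERROR) {
//       for (uint32_t i = 0; i < count; ++i) {
//           const char* name = nullptr;
//           SL_ANeuralNetworksDevice_getVendorExtensionName(device, i, &name);
//           // `name` points at a string owned by the device's extension list.
//       }
//   }
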
int SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation(
        const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex, void* context,
        void (*callback)(SL_ANeuralNetworksExtensionOperandTypeInformation, void*)) {
    if (device == nullptr || context == nullptr || callback == nullptr) {
        LOG(ERROR)
                << "SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation passed a "
                   "nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    const Device* d = reinterpret_cast<const Device*>(device);
    const auto& extensions = d->getSupportedExtensions();

    if (vendorExtensionIndex >= extensions.size()) {
        LOG(ERROR)
                << "SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation passed a "
                   "vendorExtensionIndex that is out of range";
        return ANEURALNETWORKS_BAD_DATA;
    }
    const auto& operandTypes = extensions[vendorExtensionIndex].operandTypes;

    constexpr auto conv = [](const Extension::OperandTypeInformation& operandTypeInfo) {
        return SL_ANeuralNetworksExtensionOperandTypeInformation{
                .byteSize = operandTypeInfo.byteSize,
                .type = operandTypeInfo.type,
                .isTensor = operandTypeInfo.isTensor,
        };
    };

    for (const auto& operandTypeInfo : operandTypes) {
        const SL_ANeuralNetworksExtensionOperandTypeInformation opTypeInfo = conv(operandTypeInfo);
        callback(opTypeInfo, context);
    }
    return ANEURALNETWORKS_NO_ERROR;
}

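// Illustrative usage (not part of this file): operand-type information for one vendor
// extension can be gathered with the same callback-plus-context pattern used for operand
// performance above, e.g. counting how many of the extension's operand types are tensors:
//
//   static void CountTensors(SL_ANeuralNetworksExtensionOperandTypeInformation info, void* ctx) {
//       if (info.isTensor) ++*static_cast<uint32_t*>(ctx);
//   }
//
//   uint32_t tensorCount = 0;
//   SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation(
//           device, /*vendorExtensionIndex=*/0, &tensorCount, CountTensors);
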
// Shorthand for populating the function-pointer table below with this file's definitions.
#define NNCL_FUNC(symbol) .symbol = symbol

// Function-pointer table implementing the NNAPI Support Library driver interface at
// feature level 5, handed to clients through ANeuralNetworks_getSLDriverImpl() below.
NnApiSLDriverImplFL5 slDriverImpl{
        .base{.implFeatureLevel = ANEURALNETWORKS_FEATURE_LEVEL_5},
        NNCL_FUNC(ANeuralNetworksBurst_create),
        NNCL_FUNC(ANeuralNetworksBurst_free),
        NNCL_FUNC(ANeuralNetworksCompilation_createForDevices),
        NNCL_FUNC(ANeuralNetworksCompilation_finish),
        NNCL_FUNC(ANeuralNetworksCompilation_free),
        NNCL_FUNC(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput),
        NNCL_FUNC(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput),
        NNCL_FUNC(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput),
        NNCL_FUNC(ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput),
        NNCL_FUNC(ANeuralNetworksCompilation_setCaching),
        NNCL_FUNC(ANeuralNetworksCompilation_setPreference),
        NNCL_FUNC(ANeuralNetworksCompilation_setPriority),
        NNCL_FUNC(ANeuralNetworksCompilation_setTimeout),
        NNCL_FUNC(ANeuralNetworksDevice_getExtensionSupport),
        NNCL_FUNC(ANeuralNetworksDevice_getFeatureLevel),
        NNCL_FUNC(ANeuralNetworksDevice_getName),
        NNCL_FUNC(ANeuralNetworksDevice_getType),
        NNCL_FUNC(ANeuralNetworksDevice_getVersion),
        NNCL_FUNC(ANeuralNetworksDevice_wait),
        NNCL_FUNC(ANeuralNetworksEvent_createFromSyncFenceFd),
        NNCL_FUNC(ANeuralNetworksEvent_free),
        NNCL_FUNC(ANeuralNetworksEvent_getSyncFenceFd),
        NNCL_FUNC(ANeuralNetworksEvent_wait),
        NNCL_FUNC(ANeuralNetworksExecution_burstCompute),
        NNCL_FUNC(ANeuralNetworksExecution_compute),
        NNCL_FUNC(ANeuralNetworksExecution_create),
        NNCL_FUNC(ANeuralNetworksExecution_enableInputAndOutputPadding),
        NNCL_FUNC(ANeuralNetworksExecution_free),
        NNCL_FUNC(ANeuralNetworksExecution_getDuration),
        NNCL_FUNC(ANeuralNetworksExecution_getOutputOperandDimensions),
        NNCL_FUNC(ANeuralNetworksExecution_getOutputOperandRank),
        NNCL_FUNC(ANeuralNetworksExecution_setInput),
        NNCL_FUNC(ANeuralNetworksExecution_setInputFromMemory),
        NNCL_FUNC(ANeuralNetworksExecution_setLoopTimeout),
        NNCL_FUNC(ANeuralNetworksExecution_setMeasureTiming),
        NNCL_FUNC(ANeuralNetworksExecution_setOutput),
        NNCL_FUNC(ANeuralNetworksExecution_setOutputFromMemory),
        NNCL_FUNC(ANeuralNetworksExecution_setReusable),
        NNCL_FUNC(ANeuralNetworksExecution_setTimeout),
        NNCL_FUNC(ANeuralNetworksExecution_startComputeWithDependencies),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_addInputRole),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_addOutputRole),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_create),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_finish),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_free),
        NNCL_FUNC(ANeuralNetworksMemoryDesc_setDimensions),
        NNCL_FUNC(ANeuralNetworksMemory_copy),
        NNCL_FUNC(ANeuralNetworksMemory_createFromAHardwareBuffer),
        NNCL_FUNC(ANeuralNetworksMemory_createFromDesc),
        NNCL_FUNC(ANeuralNetworksMemory_createFromFd),
        NNCL_FUNC(ANeuralNetworksMemory_free),
        NNCL_FUNC(ANeuralNetworksModel_addOperand),
        NNCL_FUNC(ANeuralNetworksModel_addOperation),
        NNCL_FUNC(ANeuralNetworksModel_create),
        NNCL_FUNC(ANeuralNetworksModel_finish),
        NNCL_FUNC(ANeuralNetworksModel_free),
        NNCL_FUNC(ANeuralNetworksModel_getExtensionOperandType),
        NNCL_FUNC(ANeuralNetworksModel_getExtensionOperationType),
        NNCL_FUNC(ANeuralNetworksModel_getSupportedOperationsForDevices),
        NNCL_FUNC(ANeuralNetworksModel_identifyInputsAndOutputs),
        NNCL_FUNC(ANeuralNetworksModel_relaxComputationFloat32toFloat16),
        NNCL_FUNC(ANeuralNetworksModel_setOperandExtensionData),
        NNCL_FUNC(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams),
        NNCL_FUNC(ANeuralNetworksModel_setOperandValue),
        NNCL_FUNC(ANeuralNetworksModel_setOperandValueFromMemory),
        NNCL_FUNC(ANeuralNetworksModel_setOperandValueFromModel),
        NNCL_FUNC(ANeuralNetworks_getDefaultLoopTimeout),
        NNCL_FUNC(ANeuralNetworks_getDevice),
        NNCL_FUNC(ANeuralNetworks_getDeviceCount),
        NNCL_FUNC(ANeuralNetworks_getMaximumLoopTimeout),
        NNCL_FUNC(ANeuralNetworks_getRuntimeFeatureLevel),
        NNCL_FUNC(SL_ANeuralNetworksCompilation_setCachingFromFds),
        NNCL_FUNC(SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded),
        NNCL_FUNC(SL_ANeuralNetworksDevice_getPerformanceInfo),
        NNCL_FUNC(SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo),
        NNCL_FUNC(SL_ANeuralNetworksDevice_getVendorExtensionCount),
        NNCL_FUNC(SL_ANeuralNetworksDevice_getVendorExtensionName),
        NNCL_FUNC(SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation),
};

#undef NNCL_FUNC

__BEGIN_DECLS
// Exported with C linkage; returns the Support Library driver implementation defined above.
NnApiSLDriverImpl* ANeuralNetworks_getSLDriverImpl() {
    return reinterpret_cast<NnApiSLDriverImpl*>(&slDriverImpl);
}
__END_DECLS
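// Illustrative usage (not part of this file): an application embedding a vendor Support
// Library would typically dlopen() the vendor-provided shared object (the library name
// shown here is hypothetical) and resolve this entry point with dlsym():
//
//   #include <dlfcn.h>
//   void* handle = dlopen("libvendor_nnapi_sl.so", RTLD_NOW | RTLD_LOCAL);
//   auto getImpl = reinterpret_cast<NnApiSLDriverImpl* (*)()>(
//           dlsym(handle, "ANeuralNetworks_getSLDriverImpl"));
//   NnApiSLDriverImpl* impl = getImpl ? getImpl() : nullptr;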

#endif  // NN_COMPATIBILITY_LIBRARY_BUILD