/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <stdlib.h>
#include <unistd.h>

#include <cerrno>
#include <cstdint>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>

#include "tensorflow/lite/nnapi/sl/include/SupportLibrary.h"
#include "tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h"

namespace {

std::string GetTempDir() {
  const char* temp_dir = getenv("TEST_TMPDIR");
  if (temp_dir == nullptr || temp_dir[0] == '\0') {
#ifdef __ANDROID__
    return "/data/local/tmp";
#else
    return "/tmp";
#endif
  } else {
    return temp_dir;
  }
}

std::string CallCountFilePath() {
  return GetTempDir() + "/nnapi_sl_fake_impl.out";
}
// Each call to the fake NNAPI SL appends a '.' to the trace file so that a
// caller can count the number of SL API calls made.
void TraceCall() {
  std::ofstream trace_file(CallCountFilePath().c_str(), std::ofstream::app);
  if (trace_file) {
    std::cerr << "Tracing call\n";
    trace_file << '.';
    if (!trace_file) {
      std::cerr << "Error writing to '" << CallCountFilePath() << "'\n";
    }
  } else {
    std::cerr << "FAKE_NNAPI_SL: UNABLE TO TRACE CALL\n";
  }
}

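// Generic stubs used for most SL entry points below: both accept any argument
// list, record the call via TraceCall(), and either return a fixed status
// code or return nothing.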
template <int return_value, typename... Types>
int TraceCallAndReturn(Types... args) {
  TraceCall();
  return return_value;
}

template <typename... Types>
void JustTraceCall(Types... args) {
  TraceCall();
}

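// Value reported by the preferred memory alignment/padding callbacks below.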
const uint32_t kDefaultMemoryPaddingAndAlignment = 64;

NnApiSLDriverImplFL5 GetNnApiSlDriverImpl() {
  NnApiSLDriverImplFL5 sl_driver_impl;

  sl_driver_impl.base = {ANEURALNETWORKS_FEATURE_LEVEL_5};
  sl_driver_impl.ANeuralNetworksBurst_create =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksBurst_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksCompilation_createForDevices =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksCompilation_finish =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksCompilation_free = JustTraceCall;
  sl_driver_impl
      .ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput =
      [](const ANeuralNetworksCompilation* compilation, uint32_t index,
         uint32_t* alignment) -> int {
    TraceCall();
    *alignment = kDefaultMemoryPaddingAndAlignment;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl
      .ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput =
      [](const ANeuralNetworksCompilation* compilation, uint32_t index,
         uint32_t* alignment) -> int {
    TraceCall();
    *alignment = kDefaultMemoryPaddingAndAlignment;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput =
      [](const ANeuralNetworksCompilation* compilation, uint32_t index,
         uint32_t* padding) -> int {
    TraceCall();
    *padding = kDefaultMemoryPaddingAndAlignment;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput =
      [](const ANeuralNetworksCompilation* compilation, uint32_t index,
         uint32_t* padding) -> int {
    TraceCall();
    *padding = kDefaultMemoryPaddingAndAlignment;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksCompilation_setCaching =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksCompilation_setPreference =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksCompilation_setPriority =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksCompilation_setTimeout =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksDevice_getExtensionSupport =
      [](const ANeuralNetworksDevice* device, const char* extensionName,
         bool* isExtensionSupported) -> int {
    *isExtensionSupported = false;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksDevice_getFeatureLevel =
      [](const ANeuralNetworksDevice* device, int64_t* featureLevel) -> int {
    TraceCall();
    *featureLevel = ANEURALNETWORKS_FEATURE_LEVEL_5;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksDevice_getName =
      [](const ANeuralNetworksDevice* device, const char** name) -> int {
    TraceCall();
    *name = "mockDevice";
    return ANEURALNETWORKS_BAD_DATA;
  };
  sl_driver_impl.ANeuralNetworksDevice_getType =
      [](const ANeuralNetworksDevice* device, int32_t* type) -> int {
    *type = ANEURALNETWORKS_DEVICE_CPU;
    return ANEURALNETWORKS_BAD_DATA;
  };
  sl_driver_impl.ANeuralNetworksDevice_getVersion =
      [](const ANeuralNetworksDevice* device, const char** version) -> int {
    TraceCall();
    *version = "mock";
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksDevice_wait =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksEvent_createFromSyncFenceFd =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksEvent_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksEvent_getSyncFenceFd =
      TraceCallAndReturn<ANEURALNETWORKS_BAD_DATA>;
  sl_driver_impl.ANeuralNetworksEvent_wait =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_burstCompute =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_compute =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_create =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_enableInputAndOutputPadding =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksExecution_getDuration =
      [](const ANeuralNetworksExecution* execution, int32_t durationCode,
         uint64_t* duration) -> int {
    TraceCall();
    *duration = UINT64_MAX;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworksExecution_getOutputOperandDimensions =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_getOutputOperandRank =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setInput =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setInputFromMemory =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setLoopTimeout =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setMeasureTiming =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setOutput =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setOutputFromMemory =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setReusable =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_setTimeout =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksExecution_startComputeWithDependencies =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemoryDesc_addInputRole =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemoryDesc_addOutputRole =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemoryDesc_create =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemoryDesc_finish =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemoryDesc_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksMemoryDesc_setDimensions =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemory_copy =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemory_createFromAHardwareBuffer =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemory_createFromDesc =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemory_createFromFd =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksMemory_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksModel_addOperand =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_addOperation =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_create =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_finish =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_free = JustTraceCall;
  sl_driver_impl.ANeuralNetworksModel_getExtensionOperandType =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_getExtensionOperationType =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_getSupportedOperationsForDevices =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_identifyInputsAndOutputs =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_relaxComputationFloat32toFloat16 =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_setOperandExtensionData =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_setOperandSymmPerChannelQuantParams =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_setOperandValue =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_setOperandValueFromMemory =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworksModel_setOperandValueFromModel =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworks_getDefaultLoopTimeout = []() -> uint64_t {
    TraceCall();
    return UINT64_MAX;
  };
  sl_driver_impl.ANeuralNetworks_getDevice =
      TraceCallAndReturn<ANEURALNETWORKS_NO_ERROR>;
  sl_driver_impl.ANeuralNetworks_getDeviceCount =
      [](uint32_t* num_devices) -> int {
    TraceCall();
    *num_devices = 0;
    return ANEURALNETWORKS_NO_ERROR;
  };
  sl_driver_impl.ANeuralNetworks_getMaximumLoopTimeout = []() -> uint64_t {
    TraceCall();
    return UINT64_MAX;
  };
  sl_driver_impl.ANeuralNetworks_getRuntimeFeatureLevel = []() -> int64_t {
    TraceCall();
    return ANEURALNETWORKS_FEATURE_LEVEL_5;
  };

  return sl_driver_impl;
}

}  // namespace

extern "C" NnApiSLDriverImpl* ANeuralNetworks_getSLDriverImpl() {
  static NnApiSLDriverImplFL5 sl_driver_impl = GetNnApiSlDriverImpl();
  return reinterpret_cast<NnApiSLDriverImpl*>(&sl_driver_impl);
}
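
// A minimal sketch of how a test might pick up this fake implementation.
// It assumes the fake is built as a standalone shared object (the
// "libnnapi_sl_fake.so" name below is hypothetical) and uses plain
// dlopen/dlsym rather than any particular TFLite loader helper:
//
//   #include <dlfcn.h>
//
//   NnApiSLDriverImpl* LoadFakeSl() {
//     void* handle = dlopen("libnnapi_sl_fake.so", RTLD_NOW | RTLD_LOCAL);
//     if (handle == nullptr) return nullptr;
//     auto* get_impl = reinterpret_cast<NnApiSLDriverImpl* (*)()>(
//         dlsym(handle, "ANeuralNetworks_getSLDriverImpl"));
//     return get_impl != nullptr ? get_impl() : nullptr;
//   }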

namespace tflite {
namespace acceleration {

void InitNnApiSlInvocationStatus() { unlink(CallCountFilePath().c_str()); }

bool WasNnApiSlInvoked() {
  std::cerr << "Checking if file '" << CallCountFilePath() << "' exists.\n";
  if (FILE* trace_file = fopen(CallCountFilePath().c_str(), "r")) {
    fclose(trace_file);
    return true;
  } else {
    return false;
  }
}

int CountNnApiSlApiCalls() {
  FILE* trace_file = fopen(CallCountFilePath().c_str(), "r");
  // If no call was traced, the file does not exist and the count is zero.
  if (trace_file == nullptr) {
    return 0;
  }
  // Each traced call appends a single '.', so the number of characters in the
  // file is the number of SL API calls.
  int call_count = 0;
  while (fgetc(trace_file) != EOF) {
    call_count++;
  }
  fclose(trace_file);
  return call_count;
}
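
// A minimal sketch of how these helpers are typically used from a test,
// assuming the code under test routes its NNAPI calls through this fake SL
// (the RunDelegatedInference() call below is hypothetical):
//
//   InitNnApiSlInvocationStatus();       // Remove any stale trace file.
//   RunDelegatedInference();             // Drive NNAPI through the fake SL.
//   assert(WasNnApiSlInvoked());         // Trace file now exists.
//   int calls = CountNnApiSlApiCalls();  // One '.' per traced SL call.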

}  // namespace acceleration
}  // namespace tflite