1 /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6
7 http://www.apache.org/licenses/LICENSE-2.0
8
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
#include "tensorflow/lite/nnapi/nnapi_handler.h"

#include <cstdio>
#include <cstring>
#include <string>

#include "tensorflow/lite/nnapi/nnapi_implementation.h"
21
22 namespace tflite {
23 namespace nnapi {
24
// static
const char NnApiHandler::kNnapiReferenceDeviceName[] = "nnapi-reference";
// Sentinel handle values: they are reinterpret_cast to
// ANeuralNetworksDevice* by the stubbed NNAPI entry points installed in
// SetNnapiSupportedDevice, never dereferenced.
// static
const int NnApiHandler::kNnapiReferenceDevice = 1;
// static
const int NnApiHandler::kNnapiDevice = 2;

// Heap-allocated copy of the mocked device name, owned by this class and
// (re)allocated in SetDeviceName. Static storage, so it starts out nullptr.
char* NnApiHandler::nnapi_device_name_ = nullptr;
// Feature level reported for the mocked device (set in
// SetNnapiSupportedDevice); zero-initialized static.
int NnApiHandler::nnapi_device_feature_level_;
34
NnApiPassthroughInstance()35 const NnApi* NnApiPassthroughInstance() {
36 static const NnApi orig_nnapi_copy = *NnApiImplementation();
37 return &orig_nnapi_copy;
38 }
39
40 // static
Instance()41 NnApiHandler* NnApiHandler::Instance() {
42 // Ensuring that the original copy of nnapi is saved before we return
43 // access to NnApiHandler
44 NnApiPassthroughInstance();
45 static NnApiHandler handler{const_cast<NnApi*>(NnApiImplementation())};
46 return &handler;
47 }
48
Reset()49 void NnApiHandler::Reset() {
50 // Restores global NNAPI to original value
51 *nnapi_ = *NnApiPassthroughInstance();
52 }
53
// Overrides the reported Android SDK version and, when
// set_unsupported_ops_to_null is true, clears the function pointers of
// NNAPI entry points that this handler treats as unavailable below that
// version, mimicking a device running an older OS.
void NnApiHandler::SetAndroidSdkVersion(int version,
                                        bool set_unsupported_ops_to_null) {
  nnapi_->android_sdk_version = version;
  // NOTE(review): the runtime feature level is set to the same value as the
  // SDK version here; confirm this mirrors how NnApiImplementation
  // populates nnapi_runtime_feature_level on real devices.
  nnapi_->nnapi_runtime_feature_level = version;

  if (!set_unsupported_ops_to_null) {
    return;
  }

  // Entry points treated as unavailable below API level 29.
  if (version < 29) {
    nnapi_->ANeuralNetworks_getDeviceCount = nullptr;
    nnapi_->ANeuralNetworks_getDevice = nullptr;
    nnapi_->ANeuralNetworksDevice_getName = nullptr;
    nnapi_->ANeuralNetworksDevice_getVersion = nullptr;
    nnapi_->ANeuralNetworksDevice_getFeatureLevel = nullptr;
    nnapi_->ANeuralNetworksDevice_getType = nullptr;
    nnapi_->ANeuralNetworksModel_getSupportedOperationsForDevices = nullptr;
    nnapi_->ANeuralNetworksCompilation_createForDevices = nullptr;
    nnapi_->ANeuralNetworksCompilation_setCaching = nullptr;
    nnapi_->ANeuralNetworksExecution_compute = nullptr;
    nnapi_->ANeuralNetworksExecution_getOutputOperandRank = nullptr;
    nnapi_->ANeuralNetworksExecution_getOutputOperandDimensions = nullptr;
    nnapi_->ANeuralNetworksBurst_create = nullptr;
    nnapi_->ANeuralNetworksBurst_free = nullptr;
    nnapi_->ANeuralNetworksExecution_burstCompute = nullptr;
    nnapi_->ANeuralNetworksMemory_createFromAHardwareBuffer = nullptr;
    nnapi_->ANeuralNetworksExecution_setMeasureTiming = nullptr;
    nnapi_->ANeuralNetworksExecution_getDuration = nullptr;
    nnapi_->ANeuralNetworksDevice_getExtensionSupport = nullptr;
    nnapi_->ANeuralNetworksModel_getExtensionOperandType = nullptr;
    nnapi_->ANeuralNetworksModel_getExtensionOperationType = nullptr;
    nnapi_->ANeuralNetworksModel_setOperandExtensionData = nullptr;
  }
  // Entry point treated as unavailable below API level 28.
  if (version < 28) {
    nnapi_->ANeuralNetworksModel_relaxComputationFloat32toFloat16 = nullptr;
  }
}
91
SetDeviceName(const std::string & name)92 void NnApiHandler::SetDeviceName(const std::string& name) {
93 delete[] nnapi_device_name_;
94 nnapi_device_name_ = new char[name.size() + 1];
95 std::strcpy(nnapi_device_name_, name.c_str()); // NOLINT
96 }
97
// Stores |name| as the mocked device name, then installs the
// GetDeviceNameReturns<0> stub — presumably a header-defined template that
// makes ANeuralNetworksDevice_getName return status 0 (success); verify
// against the declaration in nnapi_handler.h.
void NnApiHandler::GetDeviceNameReturnsName(const std::string& name) {
  NnApiHandler::SetDeviceName(name);
  GetDeviceNameReturns<0>();
}
102
SetNnapiSupportedDevice(const std::string & name,int feature_level)103 void NnApiHandler::SetNnapiSupportedDevice(const std::string& name,
104 int feature_level) {
105 NnApiHandler::SetDeviceName(name);
106 nnapi_device_feature_level_ = feature_level;
107
108 GetDeviceCountReturnsCount<2>();
109 nnapi_->ANeuralNetworks_getDevice =
110 [](uint32_t devIndex, ANeuralNetworksDevice** device) -> int {
111 if (devIndex > 1) {
112 return ANEURALNETWORKS_BAD_DATA;
113 }
114
115 if (devIndex == 1) {
116 *device =
117 reinterpret_cast<ANeuralNetworksDevice*>(NnApiHandler::kNnapiDevice);
118 } else {
119 *device = reinterpret_cast<ANeuralNetworksDevice*>(
120 NnApiHandler::kNnapiReferenceDevice);
121 }
122 return ANEURALNETWORKS_NO_ERROR;
123 };
124 nnapi_->ANeuralNetworksDevice_getName =
125 [](const ANeuralNetworksDevice* device, const char** name) -> int {
126 if (device ==
127 reinterpret_cast<ANeuralNetworksDevice*>(NnApiHandler::kNnapiDevice)) {
128 *name = NnApiHandler::nnapi_device_name_;
129 return ANEURALNETWORKS_NO_ERROR;
130 }
131 if (device == reinterpret_cast<ANeuralNetworksDevice*>(
132 NnApiHandler::kNnapiReferenceDevice)) {
133 *name = NnApiHandler::kNnapiReferenceDeviceName;
134 return ANEURALNETWORKS_NO_ERROR;
135 }
136
137 return ANEURALNETWORKS_BAD_DATA;
138 };
139 nnapi_->ANeuralNetworksDevice_getFeatureLevel =
140 [](const ANeuralNetworksDevice* device, int64_t* featureLevel) -> int {
141 if (device ==
142 reinterpret_cast<ANeuralNetworksDevice*>(NnApiHandler::kNnapiDevice)) {
143 *featureLevel = NnApiHandler::nnapi_device_feature_level_;
144 return ANEURALNETWORKS_NO_ERROR;
145 }
146 if (device == reinterpret_cast<ANeuralNetworksDevice*>(
147 NnApiHandler::kNnapiReferenceDevice)) {
148 *featureLevel = 1000;
149 return ANEURALNETWORKS_NO_ERROR;
150 }
151
152 return ANEURALNETWORKS_BAD_DATA;
153 };
154 }
155
156 } // namespace nnapi
157 } // namespace tflite
158