/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdlib>  // malloc/free used by the AllocateBuffer/ReleaseBuffer mocks below

#include "common/utils.h"
#include "frameworks/native/device_manager.h"
#include "frameworks/native/hdi_device.h"
#include "frameworks/native/nn_tensor.h"
#include "test/unittest/common/mock_idevice.h"

OH_NN_ReturnCode OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
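// m_ExpectRetCode is a one-shot knob for the mocks in this file: a test case sets it to the
// return code it wants the next mock call to report, and the mock resets it to
// OH_NN_OPERATION_FORBIDDEN afterwards so that later cases are unaffected. A hypothetical
// gtest-style usage (the `device` object is illustrative, not part of this file) might look like:
//
//     OHOS::HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_FAILED;
//     bool isSupported = true;
//     EXPECT_EQ(OH_NN_FAILED, device->IsModelCacheSupported(isSupported));
//     EXPECT_FALSE(isSupported);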

namespace OHOS {
namespace NeuralNetworkRuntime {
std::shared_ptr<Device> DeviceManager::GetDevice(size_t deviceId) const
{
    sptr<OHOS::HDI::Nnrt::V1_0::INnrtDevice> idevice
        = sptr<OHOS::HDI::Nnrt::V1_0::MockIDevice>(new (std::nothrow) OHOS::HDI::Nnrt::V1_0::MockIDevice());
    if (idevice == nullptr) {
        LOGE("DeviceManager mock GetDevice failed, error happened when new sptr");
        return nullptr;
    }

    std::shared_ptr<Device> device = CreateSharedPtr<HDIDevice>(idevice);
    if (device == nullptr) {
        LOGE("DeviceManager mock GetDevice failed, the device is nullptr");
        return nullptr;
    }

    if (deviceId == 0) {
        LOGE("DeviceManager mock GetDevice failed, the passed parameter deviceId is 0");
        return nullptr;
    } else {
        return device;
    }
}
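// Hypothetical use of this mock in a test (the deviceManager object is illustrative):
// deviceId 0 drives the failure path, while any non-zero id yields a mocked HDIDevice.
//
//     EXPECT_EQ(nullptr, deviceManager.GetDevice(0));
//     EXPECT_NE(nullptr, deviceManager.GetDevice(1));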

OH_NN_ReturnCode HDIDevice::IsModelCacheSupported(bool& isSupported)
{
    // isSupported is false when the test expects the call to succeed
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
        // Reset to OH_NN_OPERATION_FORBIDDEN so that other test cases are not affected
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_SUCCESS;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_FAILED;
    }

    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                                  std::vector<bool>& ops)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_FILE) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        ops.emplace_back(true);
        return OH_NN_SUCCESS;
    }

    if (model == nullptr) {
        LOGE("HDIDevice mock GetSupportedOperation failed, Model is nullptr, cannot query supported operation.");
        return OH_NN_NULL_PTR;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        ops.emplace_back(false);
        return OH_NN_SUCCESS;
    }

    ops.emplace_back(true);
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::IsDynamicInputSupported(bool& isSupported)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_FAILED;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PATH) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_SUCCESS;
    }

    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::IsPerformanceModeSupported(bool& isSupported)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_FAILED;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_SUCCESS;
    }

    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::IsPrioritySupported(bool& isSupported)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PARAMETER) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_INVALID_PARAMETER;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_SUCCESS;
    }

    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::IsFloat16PrecisionSupported(bool& isSupported)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_SUCCESS) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_SUCCESS;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_MEMORY_ERROR) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        isSupported = false;
        return OH_NN_MEMORY_ERROR;
    }

    isSupported = true;
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::PrepareModel(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                         const ModelConfig& config,
                                         std::shared_ptr<PreparedModel>& preparedModel)
{
    if (model == nullptr) {
        LOGE("HDIDevice mock PrepareModel failed, the model is nullptr");
        return OH_NN_INVALID_PARAMETER;
    }

    if (config.enableFloat16 == false) {
        LOGE("HDIDevice mock PrepareModel failed, the enableFloat16 is false");
        return OH_NN_FAILED;
    }

    sptr<OHOS::HDI::Nnrt::V1_0::IPreparedModel> hdiPreparedModel = sptr<OHOS::HDI::Nnrt::V1_0
        ::MockIPreparedModel>(new (std::nothrow) OHOS::HDI::Nnrt::V1_0::MockIPreparedModel());
    if (hdiPreparedModel == nullptr) {
        LOGE("HDIDevice mock PrepareModel failed, error happened when new sptr");
        return OH_NN_NULL_PTR;
    }

    preparedModel = CreateSharedPtr<HDIPreparedModel>(hdiPreparedModel);
    return OH_NN_SUCCESS;
}
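// Hypothetical use of this mock in a test (liteGraph, device and the gtest macros are illustrative;
// the mock only checks that the model is non-null and that enableFloat16 is true):
//
//     ModelConfig config;
//     config.enableFloat16 = true;
//     std::shared_ptr<PreparedModel> preparedModel;
//     EXPECT_EQ(OH_NN_SUCCESS, device->PrepareModel(liteGraph, config, preparedModel));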

OH_NN_ReturnCode HDIPreparedModel::ExportModelCache(std::vector<ModelBuffer>& modelCache)
{
    if (!modelCache.empty()) {
        LOGE("HDIPreparedModel mock ExportModelCache failed, the modelCache is not empty");
        return OH_NN_INVALID_PARAMETER;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        return OH_NN_FAILED;
    }

    // The backing strings are static so the pointers stored in ModelBuffer stay valid after this call returns
    int bufferSize = 13;
    ModelBuffer modelBuffer;
    static std::string aBuffer = "mock_buffer_a";
    modelBuffer.buffer = (void*)aBuffer.c_str();
    modelBuffer.length = bufferSize;
    modelCache.emplace_back(modelBuffer);

    ModelBuffer modelBuffer2;
    static std::string bBuffer = "mock_buffer_b";
    modelBuffer2.buffer = (void*)bBuffer.c_str();
    modelBuffer2.length = bufferSize;
    modelCache.emplace_back(modelBuffer2);

    return OH_NN_SUCCESS;
}
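// Hypothetical check against this mock (the preparedModel object is illustrative):
//
//     std::vector<ModelBuffer> cache;
//     EXPECT_EQ(OH_NN_SUCCESS, preparedModel->ExportModelCache(cache));
//     EXPECT_EQ(static_cast<size_t>(2), cache.size()); // two 13-byte mock buffers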

void* HDIDevice::AllocateBuffer(size_t length)
{
    if (length == 0) {
        LOGE("HDIDevice mock AllocateBuffer failed, the length param is invalid");
        return nullptr;
    }

    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_NULL_PTR) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        return nullptr;
    }

    void* buffer = malloc(length);
    if (buffer == nullptr) {
        LOGE("HDIDevice mock AllocateBuffer failed, the buffer is nullptr");
        return nullptr;
    }
    return buffer;
}

OH_NN_ReturnCode HDIDevice::ReleaseBuffer(const void* buffer)
{
    if (buffer == nullptr) {
        LOGE("HDIDevice mock ReleaseBuffer failed, the buffer is nullptr");
        return OH_NN_NULL_PTR;
    }

    free(const_cast<void*>(buffer));
    return OH_NN_SUCCESS;
}

OH_NN_ReturnCode HDIDevice::PrepareModelFromModelCache(const std::vector<ModelBuffer>& modelCache,
                                                       const ModelConfig& config,
                                                       std::shared_ptr<PreparedModel>& preparedModel)
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        return OH_NN_FAILED;
    }

    if (modelCache.empty() || config.enableFloat16 == false) {
        LOGE("HDIDevice mock PrepareModelFromModelCache failed, the modelCache is empty or enableFloat16 is false");
        return OH_NN_FAILED;
    }

    sptr<OHOS::HDI::Nnrt::V1_0::IPreparedModel> hdiPreparedModel = sptr<OHOS::HDI::Nnrt::V1_0
        ::MockIPreparedModel>(new (std::nothrow) OHOS::HDI::Nnrt::V1_0::MockIPreparedModel());
    if (hdiPreparedModel == nullptr) {
        LOGE("HDIDevice mock PrepareModelFromModelCache failed, error happened when new sptr");
        return OH_NN_NULL_PTR;
    }

    preparedModel = CreateSharedPtr<HDIPreparedModel>(hdiPreparedModel);

    return OH_NN_SUCCESS;
}

bool NNTensor::IsDynamicShape() const
{
    if (HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        HDI::Nnrt::V1_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        return false;
    }

    return true;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS