/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "transform.h"

#include "memory_manager.h"
#include "common/log.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
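// Element sizes in bytes for the fixed-width data types; used by GetTypeSize() below.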
const uint32_t BIT8_TO_BYTE = 1;
const uint32_t BIT16_TO_BYTE = 2;
const uint32_t BIT32_TO_BYTE = 4;
const uint32_t BIT64_TO_BYTE = 8;

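// HDIToNN: maps HDI V1_0 device enums to their OH_NN_* counterparts.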
OH_NN_DeviceType HDIToNN::TransHDIDeviceType(const V1_0::DeviceType& iDeviceType)
{
    switch (iDeviceType) {
        case V1_0::DeviceType::CPU:
            return OH_NN_CPU;
        case V1_0::DeviceType::GPU:
            return OH_NN_GPU;
        case V1_0::DeviceType::ACCELERATOR:
            return OH_NN_ACCELERATOR;
        default:
            return OH_NN_OTHERS;
    }
}

DeviceStatus HDIToNN::TransHDIDeviceStatus(const V1_0::DeviceStatus& iDeviceStatus)
{
    switch (iDeviceStatus) {
        case V1_0::DeviceStatus::AVAILABLE:
            return DeviceStatus::AVAILABLE;
        case V1_0::DeviceStatus::BUSY:
            return DeviceStatus::BUSY;
        case V1_0::DeviceStatus::OFFLINE:
            return DeviceStatus::OFFLINE;
        default:
            return DeviceStatus::UNKNOWN;
    }
}

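// NNToHDI: maps OH_NN_* enums and tensors to their HDI V1_0 counterparts.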
V1_0::PerformanceMode NNToHDI::TransPerformanceMode(const OH_NN_PerformanceMode& mode)
{
    switch (mode) {
        case OH_NN_PERFORMANCE_LOW:
            return V1_0::PerformanceMode::PERFORMANCE_LOW;
        case OH_NN_PERFORMANCE_MEDIUM:
            return V1_0::PerformanceMode::PERFORMANCE_MEDIUM;
        case OH_NN_PERFORMANCE_HIGH:
            return V1_0::PerformanceMode::PERFORMANCE_HIGH;
        case OH_NN_PERFORMANCE_EXTREME:
            return V1_0::PerformanceMode::PERFORMANCE_EXTREME;
        default:
            return V1_0::PerformanceMode::PERFORMANCE_NONE;
    }
}

V1_0::Priority NNToHDI::TransPriority(const OH_NN_Priority& priority)
{
    switch (priority) {
        case OH_NN_PRIORITY_LOW:
            return V1_0::Priority::PRIORITY_LOW;
        case OH_NN_PRIORITY_MEDIUM:
            return V1_0::Priority::PRIORITY_MEDIUM;
        case OH_NN_PRIORITY_HIGH:
            return V1_0::Priority::PRIORITY_HIGH;
        default:
            return V1_0::Priority::PRIORITY_NONE;
    }
}

V1_0::DataType NNToHDI::TransDataType(const OH_NN_DataType& dataType)
{
    switch (dataType) {
        case OH_NN_BOOL:
            return V1_0::DataType::DATA_TYPE_BOOL;
        case OH_NN_INT8:
            return V1_0::DataType::DATA_TYPE_INT8;
        case OH_NN_INT16:
            return V1_0::DataType::DATA_TYPE_INT16;
        case OH_NN_INT32:
            return V1_0::DataType::DATA_TYPE_INT32;
        case OH_NN_INT64:
            return V1_0::DataType::DATA_TYPE_INT64;
        case OH_NN_UINT8:
            return V1_0::DataType::DATA_TYPE_UINT8;
        case OH_NN_UINT16:
            return V1_0::DataType::DATA_TYPE_UINT16;
        case OH_NN_UINT32:
            return V1_0::DataType::DATA_TYPE_UINT32;
        case OH_NN_UINT64:
            return V1_0::DataType::DATA_TYPE_UINT64;
        case OH_NN_FLOAT16:
            return V1_0::DataType::DATA_TYPE_FLOAT16;
        case OH_NN_FLOAT32:
            return V1_0::DataType::DATA_TYPE_FLOAT32;
        case OH_NN_FLOAT64:
            return V1_0::DataType::DATA_TYPE_FLOAT64;
        default:
            return V1_0::DataType::DATA_TYPE_UNKNOWN;
    }
}

V1_0::Format NNToHDI::TransFormat(const OH_NN_Format& format)
{
    switch (format) {
        case OH_NN_FORMAT_NCHW:
            return V1_0::Format::FORMAT_NCHW;
        case OH_NN_FORMAT_NHWC:
            return V1_0::Format::FORMAT_NHWC;
        default:
            return V1_0::Format::FORMAT_NONE;
    }
}

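// Builds a V1_0::IOTensor from a runtime IOTensor. When the tensor carries data, the backing
// buffer is looked up through MemoryManager and passed on as a shared-memory fd; otherwise
// the tensor is sent with an empty SharedBuffer (INVALID_FD).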
V1_0::IOTensor NNToHDI::TransIOTensor(const IOTensor& tensor)
{
    V1_0::IOTensor iTensor;
    iTensor.name = tensor.name;
    iTensor.dataType = TransDataType(tensor.dataType);
    iTensor.dimensions = tensor.dimensions;
    iTensor.format = TransFormat(tensor.format);

    V1_0::SharedBuffer iBuffer {INVALID_FD, 0, 0, 0};
    if (tensor.data != nullptr) {
        auto memManager = MemoryManager::GetInstance();
        Memory memory;
        auto ret = memManager->GetMemory(tensor.data, memory);
        if (ret != OH_NN_SUCCESS) {
            LOGE("Invalid Tensor buffer, cannot transform to fd.");
        } else {
            iBuffer.fd = memory.fd;
            iBuffer.bufferSize = memory.length;
            iBuffer.offset = 0;
            iBuffer.dataSize = memory.length;
        }
    }
    iTensor.data = iBuffer;

    return iTensor;
}

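// Returns the size in bytes of a single element of the given data type, or 0 for unknown types.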
uint32_t GetTypeSize(OH_NN_DataType type)
{
    switch (type) {
        case OH_NN_BOOL:
            return sizeof(bool);
        case OH_NN_INT8:
        case OH_NN_UINT8:
            return BIT8_TO_BYTE;
        case OH_NN_INT16:
        case OH_NN_UINT16:
        case OH_NN_FLOAT16:
            return BIT16_TO_BYTE;
        case OH_NN_INT32:
        case OH_NN_UINT32:
        case OH_NN_FLOAT32:
            return BIT32_TO_BYTE;
        case OH_NN_INT64:
        case OH_NN_UINT64:
        case OH_NN_FLOAT64:
            return BIT64_TO_BYTE;
        default:
            return 0;
    }
}

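// NNToMS: maps OH_NN_* types and operator attributes to their MindSpore Lite counterparts.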
mindspore::lite::DataType NNToMS::TransformDataType(OH_NN_DataType type)
{
    switch (type) {
        case OH_NN_BOOL:
            return mindspore::lite::DATA_TYPE_BOOL;
        case OH_NN_INT8:
            return mindspore::lite::DATA_TYPE_INT8;
        case OH_NN_INT16:
            return mindspore::lite::DATA_TYPE_INT16;
        case OH_NN_INT32:
            return mindspore::lite::DATA_TYPE_INT32;
        case OH_NN_INT64:
            return mindspore::lite::DATA_TYPE_INT64;
        case OH_NN_UINT8:
            return mindspore::lite::DATA_TYPE_UINT8;
        case OH_NN_UINT16:
            return mindspore::lite::DATA_TYPE_UINT16;
        case OH_NN_UINT32:
            return mindspore::lite::DATA_TYPE_UINT32;
        case OH_NN_UINT64:
            return mindspore::lite::DATA_TYPE_UINT64;
        case OH_NN_FLOAT16:
            return mindspore::lite::DATA_TYPE_FLOAT16;
        case OH_NN_FLOAT32:
            return mindspore::lite::DATA_TYPE_FLOAT32;
        case OH_NN_FLOAT64:
            return mindspore::lite::DATA_TYPE_FLOAT64;
        default:
            return mindspore::lite::DATA_TYPE_UNKNOWN;
    }
}

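// Unrecognized formats fall back to NHWC.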
mindspore::lite::Format NNToMS::TransformFormat(OH_NN_Format type)
{
    switch (type) {
        case OH_NN_FORMAT_NCHW:
            return mindspore::lite::FORMAT_NCHW;
        case OH_NN_FORMAT_NHWC:
            return mindspore::lite::FORMAT_NHWC;
        default:
            return mindspore::lite::FORMAT_NHWC;
    }
}

mindspore::lite::ActivationType NNToMS::TransfromFusionType(OH_NN_FuseType type)
{
    switch (type) {
        case OH_NN_FUSED_NONE:
            return mindspore::lite::ACTIVATION_TYPE_NO_ACTIVATION;
        case OH_NN_FUSED_RELU:
            return mindspore::lite::ACTIVATION_TYPE_RELU;
        case OH_NN_FUSED_RELU6:
            return mindspore::lite::ACTIVATION_TYPE_RELU6;
        default:
            return mindspore::lite::ACTIVATION_TYPE_UNKNOWN;
    }
}

mindspore::lite::QuantType NNToMS::TransformQuantType(OHOS::NeuralNetworkRuntime::Ops::OpsQuantType type)
{
    switch (type) {
        case OHOS::NeuralNetworkRuntime::Ops::OpsQuantType::QUANT_NONE:
            return mindspore::lite::QUANT_TYPE_NONE;
        case OHOS::NeuralNetworkRuntime::Ops::OpsQuantType::QUANT_ALL:
            return mindspore::lite::QUANT_TYPE_ALL;
        default:
            return mindspore::lite::QUANT_TYPE_NONE;
    }
}

mindspore::lite::PadMode NNToMS::TransformPadModeValue(int8_t padMode)
{
    // padMode is an int8_t option value: 0 maps to PAD_MODE_SAME; any other value maps to PAD_MODE_VALID.
    return (padMode == 0) ? mindspore::lite::PadMode::PAD_MODE_SAME :
            mindspore::lite::PadMode::PAD_MODE_VALID;
}

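// MSToNN: maps MindSpore Lite types and quantization parameters back to their OH_NN_* counterparts.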
OH_NN_DataType MSToNN::TransformDataType(mindspore::lite::DataType type)
{
    switch (type) {
        case mindspore::lite::DATA_TYPE_BOOL:
            return OH_NN_BOOL;
        case mindspore::lite::DATA_TYPE_INT8:
            return OH_NN_INT8;
        case mindspore::lite::DATA_TYPE_INT16:
            return OH_NN_INT16;
        case mindspore::lite::DATA_TYPE_INT32:
            return OH_NN_INT32;
        case mindspore::lite::DATA_TYPE_INT64:
            return OH_NN_INT64;
        case mindspore::lite::DATA_TYPE_UINT8:
            return OH_NN_UINT8;
        case mindspore::lite::DATA_TYPE_UINT16:
            return OH_NN_UINT16;
        case mindspore::lite::DATA_TYPE_UINT32:
            return OH_NN_UINT32;
        case mindspore::lite::DATA_TYPE_UINT64:
            return OH_NN_UINT64;
        case mindspore::lite::DATA_TYPE_FLOAT16:
            return OH_NN_FLOAT16;
        case mindspore::lite::DATA_TYPE_FLOAT32:
            return OH_NN_FLOAT32;
        case mindspore::lite::DATA_TYPE_FLOAT64:
            return OH_NN_FLOAT64;
        default:
            return OH_NN_UNKNOWN;
    }
}

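// Copies MindSpore Lite quantization parameters (numBits, scale, zeroPoint) into runtime QuantParam entries.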
std::vector<QuantParam> MSToNN::TransformQuantParams(std::vector<mindspore::lite::QuantParam> msQuantParams)
{
    std::vector<QuantParam> nnQuantParam;
    for (const mindspore::lite::QuantParam& param : msQuantParams) {
        nnQuantParam.emplace_back((QuantParam){param.numBits, param.scale, param.zeroPoint});
    }
    return nnQuantParam;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS