/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "frameworks/native/compilation.h"
#include "frameworks/native/execution_plan.h"
#include "frameworks/native/hdi_device_v2_0.h"
#include "test/unittest/common/v2_0/mock_idevice.h"

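// Expectation flag shared with the mock prepared model; test cases set it to force
// specific failure paths in the stub implementations below.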
OH_NN_ReturnCode OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;

namespace OHOS {
namespace NeuralNetworkRuntime {
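// Stub override: returns a device backed by MockIDevice so tests can exercise
// ExecutionPlan without real NNRt hardware.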
std::shared_ptr<Device> ExecutionPlan::GetInputDevice() const
{
    sptr<OHOS::HDI::Nnrt::V2_0::INnrtDevice> idevice
        = sptr<OHOS::HDI::Nnrt::V2_0::MockIDevice>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIDevice());
    std::shared_ptr<Device> device = std::make_shared<HDIDeviceV2_0>(idevice);
    return device;
}

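// Stub override: mirrors GetInputDevice() and returns a mock-backed output device.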
std::shared_ptr<Device> ExecutionPlan::GetOutputDevice() const
{
    sptr<OHOS::HDI::Nnrt::V2_0::INnrtDevice> idevice
        = sptr<OHOS::HDI::Nnrt::V2_0::MockIDevice>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIDevice());
    std::shared_ptr<Device> device = std::make_shared<HDIDeviceV2_0>(idevice);
    return device;
}

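// Stub override: allocates a plain heap buffer instead of device shared memory; returns
// nullptr when the length is invalid or when the mock flag requests a simulated failure.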
void* HDIDeviceV2_0::AllocateBuffer(size_t length)
{
    if (length == 0) {
        LOGE("The length param is invalid, length=0");
        return nullptr;
    }

    void* buffer = malloc(length);
    if (buffer == nullptr) {
        LOGE("Allocate buffer failed");
        return nullptr;
    }

    if (OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_INVALID_PARAMETER) {
        OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        free(buffer);
        return nullptr;
    }
    return buffer;
}

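// Stub override: frees a buffer obtained from AllocateBuffer() above.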
OH_NN_ReturnCode HDIDeviceV2_0::ReleaseBuffer(const void* buffer)
{
    if (buffer == nullptr) {
        LOGE("The buffer is nullptr, no buffer to release");
        return OH_NN_FAILED;
    }
    free(const_cast<void*>(buffer));
    return OH_NN_SUCCESS;
}

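// Stub override: reports fixed input dimension ranges so shape-checking tests have
// deterministic bounds to validate against.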
OH_NN_ReturnCode HDIPreparedModelV2_0::GetInputDimRanges(std::vector<std::vector<uint32_t>>& minInputDims,
    std::vector<std::vector<uint32_t>>& maxInputDims)
{
    minInputDims = {{0, 0}, {0, 0}};
    maxInputDims = {{100, 100}, {100, 100}};

    return OH_NN_SUCCESS;
}

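// Stub override: validates the tensors, honours the mock failure flag, and reports a
// successful run with the first output's dimensions echoed back.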
OH_NN_ReturnCode HDIPreparedModelV2_0::Run(const std::vector<IOTensor>& inputs, const std::vector<IOTensor>& outputs,
    std::vector<std::vector<int32_t>>& outputsDims, std::vector<bool>& isOutputBufferEnough)
{
    if (inputs.empty() || outputs.empty()) {
        return OH_NN_INVALID_PARAMETER;
    }

    if (OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode == OH_NN_FAILED) {
        OHOS::HDI::Nnrt::V2_0::MockIPreparedModel::m_ExpectRetCode = OH_NN_OPERATION_FORBIDDEN;
        return OH_NN_INVALID_PARAMETER;
    }

    isOutputBufferEnough.emplace_back(true);
    outputsDims.emplace_back(outputs[0].dimensions);

    return OH_NN_SUCCESS;
}

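// Stub override: builds an ExecutionPlan from a mock prepared model and a mock device,
// so Compilation tests can run without a real backend.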
std::shared_ptr<ExecutionPlan> Compilation::GetExecutionPlan() const
{
    sptr<OHOS::HDI::Nnrt::V2_0::IPreparedModel> hdiPreparedModel
        = sptr<OHOS::HDI::Nnrt::V2_0::MockIPreparedModel>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIPreparedModel());

    std::shared_ptr<PreparedModel> preparedModel = std::make_shared<HDIPreparedModelV2_0>(hdiPreparedModel);
    sptr<OHOS::HDI::Nnrt::V2_0::INnrtDevice> idevice
        = sptr<OHOS::HDI::Nnrt::V2_0::MockIDevice>(new (std::nothrow) OHOS::HDI::Nnrt::V2_0::MockIDevice());
    std::shared_ptr<Device> device = std::make_shared<HDIDeviceV2_0>(idevice);
    ExecutionPlan executor(preparedModel, device);
    std::shared_ptr<ExecutionPlan> pExecutor = std::make_shared<ExecutionPlan>(executor);
    return pExecutor;
}
} // namespace NeuralNetworkRuntime
} // namespace OHOS