/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include "VtsHalNeuralnetworks.h"

#include <android-base/logging.h>

#include "Callbacks.h"

namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace vts {
namespace functional {

using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
using V1_1::ExecutionPreference;

// internal helper function
static void createPreparedModel(const sp<IDevice>& device, const Model& model,
                                sp<IPreparedModel>* preparedModel) {
    ASSERT_NE(nullptr, preparedModel);

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedOpsLaunchStatus = device->getSupportedOperations_1_2(
            model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                ASSERT_EQ(ErrorStatus::NONE, status);
                ASSERT_NE(0ul, supported.size());
                fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                 [](bool valid) { return valid; });
            });
    ASSERT_TRUE(supportedOpsLaunchStatus.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
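    // prepareModel_1_2 also accepts compilation-cache arguments; passing empty
    // model/data cache handle vectors and a default-constructed cache token
    // means no compilation caching is exercised by this helper.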
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_2(
            model, ExecutionPreference::FAST_SINGLE_ANSWER, hidl_vec<hidl_handle>(),
            hidl_vec<hidl_handle>(), HidlToken(), preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    *preparedModel = getPreparedModel_1_2(preparedModelCallback);

    // The getSupportedOperations_1_2 call returns a list of operations that are
    // guaranteed not to fail if prepareModel_1_2 is called, and
    // 'fullySupportsModel' is true if and only if the entire model is
    // guaranteed. If a driver has any doubt that it can prepare an operation,
    // it must return false. So here, if a driver isn't sure whether it can
    // support an operation, but reports that it successfully prepared the
    // model, the test can continue.
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel->get());
        LOG(INFO) << "NN VTS: Unable to test Request validation because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ]   Unable to test Request validation because vendor service "
                     "cannot prepare model that it does not support."
                  << std::endl;
        return;
    }
    ASSERT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel->get());
}

// A class for test environment setup
NeuralnetworksHidlEnvironment::NeuralnetworksHidlEnvironment() {}

NeuralnetworksHidlEnvironment::~NeuralnetworksHidlEnvironment() {}

NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
    // This has to return a "new" object because it is freed inside
    // ::testing::AddGlobalTestEnvironment when the gtest is being torn down
    static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
    return instance;
}

void NeuralnetworksHidlEnvironment::registerTestServices() {
    registerTestService<IDevice>();
}

// The main test class for the NeuralNetworks HIDL HAL.
NeuralnetworksHidlTest::NeuralnetworksHidlTest() {}

NeuralnetworksHidlTest::~NeuralnetworksHidlTest() {}

void NeuralnetworksHidlTest::SetUp() {
    ::testing::VtsHalHidlTargetTestBase::SetUp();
    device = ::testing::VtsHalHidlTargetTestBase::getService<IDevice>(
            NeuralnetworksHidlEnvironment::getInstance());

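    // When built for presubmit (PRESUBMIT_NOT_VTS) the sample driver may not be
    // present on the device, so skip the test instead of failing if a "sample-"
    // service cannot be found.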
#ifdef PRESUBMIT_NOT_VTS
    const std::string name =
            NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
    const std::string sampleDriver = "sample-";
    if (device == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
        GTEST_SKIP();
    }
#endif  // PRESUBMIT_NOT_VTS

    ASSERT_NE(nullptr, device.get());
}

void NeuralnetworksHidlTest::TearDown() {
    device = nullptr;
    ::testing::VtsHalHidlTargetTestBase::TearDown();
}

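// Validate the model itself, then, if the service is able to prepare it,
// validate the request and burst execution paths against the prepared model.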
void ValidationTest::validateEverything(const Model& model, const std::vector<Request>& requests) {
    validateModel(model);

    // create IPreparedModel
    sp<IPreparedModel> preparedModel;
    ASSERT_NO_FATAL_FAILURE(createPreparedModel(device, model, &preparedModel));
    if (preparedModel == nullptr) {
        return;
    }

    validateRequests(preparedModel, requests);
    validateBurst(preparedModel, requests);
}

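// The prepare callback hands back a V1_0::IPreparedModel, so cast it up to the
// V1_2 interface, falling back to nullptr if the service does not provide one.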
sp<IPreparedModel> getPreparedModel_1_2(
        const sp<V1_2::implementation::PreparedModelCallback>& callback) {
    sp<V1_0::IPreparedModel> preparedModelV1_0 = callback->getPreparedModel();
    return V1_2::IPreparedModel::castFrom(preparedModelV1_0).withDefault(nullptr);
}

}  // namespace functional
}  // namespace vts
}  // namespace V1_2
}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android

namespace android::hardware::neuralnetworks::V1_0 {

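// Pretty-print the V1_0 enums so gtest assertion failures report human-readable
// values via the generated toString() helpers.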
::std::ostream& operator<<(::std::ostream& os, ErrorStatus errorStatus) {
    return os << toString(errorStatus);
}

::std::ostream& operator<<(::std::ostream& os, DeviceStatus deviceStatus) {
    return os << toString(deviceStatus);
}

}  // namespace android::hardware::neuralnetworks::V1_0

using android::hardware::neuralnetworks::V1_2::vts::functional::NeuralnetworksHidlEnvironment;

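// Register the global test environment, initialize gtest, and let the
// environment parse its own command-line options (such as which HAL service
// instance to target).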
int main(int argc, char** argv) {
    ::testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
    ::testing::InitGoogleTest(&argc, argv);
    NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);

    int status = RUN_ALL_TESTS();
    return status;
}