//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

6 #pragma once
7
#include <armnn/IRuntime.hpp>
#include <test/TensorHelpers.hpp>

#include <Network.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <iomanip>
#include <string>
#include <utility>
18
19 namespace armnnUtils
20 {
21
/// Test fixture that parses a network described by the m_Prototext string with a
/// parser of type TParser, loads it onto the CpuRef backend of an armnn runtime,
/// and runs it against expected outputs.
template<typename TParser>
struct ParserPrototxtFixture
{
    ParserPrototxtFixture()
        : m_Parser(TParser::Create())
        , m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions()))
        , m_NetworkIdentifier(-1) // -1 marks "no network loaded yet"; set by LoadNetwork in Setup()
    {
    }

    /// Parses and loads the network defined by the m_Prototext string.
    /// @{
    void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName);
    void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
        const std::string& inputName,
        const std::string& outputName);
    void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
        const armnn::TensorShape& outputTensorShape,
        const std::string& inputName,
        const std::string& outputName);
    void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
        const std::vector<std::string>& requestedOutputs);
    void Setup();
    armnn::IOptimizedNetworkPtr SetupOptimizedNetwork(
        const std::map<std::string,armnn::TensorShape>& inputShapes,
        const std::vector<std::string>& requestedOutputs);
    /// @}

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// This overload assumes that the network has a single input and a single output.
    template <std::size_t NumOutputDimensions>
    void RunTest(const std::vector<float>& inputData, const std::vector<float>& expectedOutputData);

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// Calls RunTest with output type of uint8_t for checking comparison operators.
    template <std::size_t NumOutputDimensions>
    void RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
        const std::map<std::string, std::vector<uint8_t>>& expectedOutputData);

    /// Executes the network with the given input tensors and checks the results against the given output tensors.
    /// This overload supports multiple inputs and multiple outputs, identified by name.
    template <std::size_t NumOutputDimensions, typename T = float>
    void RunTest(const std::map<std::string, std::vector<float>>& inputData,
        const std::map<std::string, std::vector<T>>& expectedOutputData);

    std::string m_Prototext;                                    // Network description consumed by Setup().
    std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser; // Parser with its custom deleter from TParser::Create().
    armnn::IRuntimePtr m_Runtime;
    armnn::NetworkId m_NetworkIdentifier;

    /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
    /// so they don't need to be passed to the single-input-single-output overload of RunTest().
    /// @{
    std::string m_SingleInputName;
    std::string m_SingleOutputName;
    /// @}

    /// This will store the output shape so it don't need to be passed to the single-input-single-output overload
    /// of RunTest(). A default-constructed (0-dimension) shape means "no shape check requested".
    armnn::TensorShape m_SingleOutputShape;
};
83
84 template<typename TParser>
SetupSingleInputSingleOutput(const std::string & inputName,const std::string & outputName)85 void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const std::string& inputName,
86 const std::string& outputName)
87 {
88 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
89 m_SingleInputName = inputName;
90 m_SingleOutputName = outputName;
91 Setup({ }, { outputName });
92 }
93
94 template<typename TParser>
SetupSingleInputSingleOutput(const armnn::TensorShape & inputTensorShape,const std::string & inputName,const std::string & outputName)95 void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
96 const std::string& inputName,
97 const std::string& outputName)
98 {
99 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
100 m_SingleInputName = inputName;
101 m_SingleOutputName = outputName;
102 Setup({ { inputName, inputTensorShape } }, { outputName });
103 }
104
105 template<typename TParser>
SetupSingleInputSingleOutput(const armnn::TensorShape & inputTensorShape,const armnn::TensorShape & outputTensorShape,const std::string & inputName,const std::string & outputName)106 void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
107 const armnn::TensorShape& outputTensorShape,
108 const std::string& inputName,
109 const std::string& outputName)
110 {
111 // Stores the input name, the output name and the output tensor shape
112 // so they don't need to be passed to the single-input-single-output RunTest().
113 m_SingleInputName = inputName;
114 m_SingleOutputName = outputName;
115 m_SingleOutputShape = outputTensorShape;
116 Setup({ { inputName, inputTensorShape } }, { outputName });
117 }
118
119 template<typename TParser>
Setup(const std::map<std::string,armnn::TensorShape> & inputShapes,const std::vector<std::string> & requestedOutputs)120 void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
121 const std::vector<std::string>& requestedOutputs)
122 {
123 std::string errorMessage;
124
125 armnn::INetworkPtr network =
126 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
127 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
128 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
129 if (ret != armnn::Status::Success)
130 {
131 throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
132 errorMessage,
133 CHECK_LOCATION().AsString()));
134 }
135 }
136
137 template<typename TParser>
Setup()138 void ParserPrototxtFixture<TParser>::Setup()
139 {
140 std::string errorMessage;
141
142 armnn::INetworkPtr network =
143 m_Parser->CreateNetworkFromString(m_Prototext.c_str());
144 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
145 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
146 if (ret != armnn::Status::Success)
147 {
148 throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}",
149 errorMessage,
150 CHECK_LOCATION().AsString()));
151 }
152 }
153
154 template<typename TParser>
SetupOptimizedNetwork(const std::map<std::string,armnn::TensorShape> & inputShapes,const std::vector<std::string> & requestedOutputs)155 armnn::IOptimizedNetworkPtr ParserPrototxtFixture<TParser>::SetupOptimizedNetwork(
156 const std::map<std::string,armnn::TensorShape>& inputShapes,
157 const std::vector<std::string>& requestedOutputs)
158 {
159 armnn::INetworkPtr network =
160 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
161 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
162 return optimized;
163 }
164
165 template<typename TParser>
166 template <std::size_t NumOutputDimensions>
RunTest(const std::vector<float> & inputData,const std::vector<float> & expectedOutputData)167 void ParserPrototxtFixture<TParser>::RunTest(const std::vector<float>& inputData,
168 const std::vector<float>& expectedOutputData)
169 {
170 RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
171 }
172
173 template<typename TParser>
174 template <std::size_t NumOutputDimensions>
RunComparisonTest(const std::map<std::string,std::vector<float>> & inputData,const std::map<std::string,std::vector<uint8_t>> & expectedOutputData)175 void ParserPrototxtFixture<TParser>::RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
176 const std::map<std::string, std::vector<uint8_t>>&
177 expectedOutputData)
178 {
179 RunTest<NumOutputDimensions, uint8_t>(inputData, expectedOutputData);
180 }
181
182 template<typename TParser>
183 template <std::size_t NumOutputDimensions, typename T>
RunTest(const std::map<std::string,std::vector<float>> & inputData,const std::map<std::string,std::vector<T>> & expectedOutputData)184 void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData,
185 const std::map<std::string, std::vector<T>>& expectedOutputData)
186 {
187 // Sets up the armnn input tensors from the given vectors.
188 armnn::InputTensors inputTensors;
189 for (auto&& it : inputData)
190 {
191 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
192 inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
193 }
194
195 // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
196 std::map<std::string, boost::multi_array<T, NumOutputDimensions>> outputStorage;
197 armnn::OutputTensors outputTensors;
198 for (auto&& it : expectedOutputData)
199 {
200 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
201 outputStorage.emplace(it.first, MakeTensor<T, NumOutputDimensions>(bindingInfo.second));
202 outputTensors.push_back(
203 { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
204 }
205
206 m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
207
208 // Compares each output tensor to the expected values.
209 for (auto&& it : expectedOutputData)
210 {
211 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
212 if (bindingInfo.second.GetNumElements() != it.second.size())
213 {
214 throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} elements. "
215 "{2} elements supplied. {3}",
216 it.first,
217 bindingInfo.second.GetNumElements(),
218 it.second.size(),
219 CHECK_LOCATION().AsString()));
220 }
221
222 // If the expected output shape is set, the output tensor checks will be carried out.
223 if (m_SingleOutputShape.GetNumDimensions() != 0)
224 {
225
226 if (bindingInfo.second.GetShape().GetNumDimensions() == NumOutputDimensions &&
227 bindingInfo.second.GetShape().GetNumDimensions() == m_SingleOutputShape.GetNumDimensions())
228 {
229 for (unsigned int i = 0; i < m_SingleOutputShape.GetNumDimensions(); ++i)
230 {
231 if (m_SingleOutputShape[i] != bindingInfo.second.GetShape()[i])
232 {
233 // This exception message could not be created by fmt:format because of an oddity in
234 // the operator << of TensorShape.
235 std::stringstream message;
236 message << "Output tensor " << it.first << " is expected to have "
237 << bindingInfo.second.GetShape() << "shape. "
238 << m_SingleOutputShape << " shape supplied. "
239 << CHECK_LOCATION().AsString();
240 throw armnn::Exception(message.str());
241 }
242 }
243 }
244 else
245 {
246 throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} dimensions. "
247 "{2} dimensions supplied. {3}",
248 it.first,
249 bindingInfo.second.GetShape().GetNumDimensions(),
250 NumOutputDimensions,
251 CHECK_LOCATION().AsString()));
252 }
253 }
254
255 auto outputExpected = MakeTensor<T, NumOutputDimensions>(bindingInfo.second, it.second);
256 if (std::is_same<T, uint8_t>::value)
257 {
258 BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first], true));
259 }
260 else
261 {
262 BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
263 }
264 }
265 }
266
267 } // namespace armnnUtils
268