//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "JsonPrinterTestImpl.hpp"
#include "armnn/utility/StringUtils.hpp"

#include <Profiling.hpp>

#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>

#include <boost/test/unit_test.hpp>

#include <algorithm>
#include <cctype>
#include <sstream>
#include <stack>
#include <string>
#include <vector>

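// Returns true if 'opening' and 'closing' form a matching JSON bracket pair: {} or [].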
inline bool AreMatchingPair(const char opening, const char closing)
{
    return (opening == '{' && closing == '}') || (opening == '[' && closing == ']');
}

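// Returns true when every '{' and '[' in the string is closed by the matching '}' or ']'
// in the correct order, i.e. the JSON braces and brackets are balanced.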
bool AreParenthesesMatching(const std::string& exp)
{
    std::stack<char> expStack;
    for (size_t i = 0; i < exp.length(); ++i)
    {
        if (exp[i] == '{' || exp[i] == '[')
        {
            expStack.push(exp[i]);
        }
        else if (exp[i] == '}' || exp[i] == ']')
        {
            if (expStack.empty() || !AreMatchingPair(expStack.top(), exp[i]))
            {
                return false;
            }
            else
            {
                expStack.pop();
            }
        }
    }
    return expStack.empty();
}

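// Parses every number found inside '[' ... ']' arrays in the JSON string (e.g. the profiler's
// "raw" measurement arrays) and returns them as doubles; fails the test if a value cannot be
// converted.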
std::vector<double> ExtractMeasurements(const std::string& exp)
{
    std::vector<double> numbers;
    bool inArray = false;
    std::string numberString;
    for (size_t i = 0; i < exp.size(); ++i)
    {
        if (exp[i] == '[')
        {
            inArray = true;
        }
        else if (exp[i] == ']' && inArray)
        {
            try
            {
                armnn::stringUtils::StringTrim(numberString, "\t,\n");
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const&)
            {
                BOOST_FAIL("Could not convert measurements to double: " + numberString);
            }

            numberString.clear();
            inArray = false;
        }
        else if (exp[i] == ',' && inArray)
        {
            try
            {
                armnn::stringUtils::StringTrim(numberString, "\t,\n");
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const&)
            {
                BOOST_FAIL("Could not convert measurements to double: " + numberString);
            }
            numberString.clear();
        }
        else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
        {
            numberString += exp[i];
        }
    }
    return numbers;
}

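// Splits the JSON string into its '{' ... '}' sections, in the order their closing braces
// appear (nested sections before their parents), so each one can be checked independently.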
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }

    return sections;
}

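// Builds a minimal input -> softmax -> output network, optimizes it for the given backends,
// runs three inferences with profiling enabled, and returns the profiler's Print() output as JSON.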
std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends)
{
    using namespace armnn;

    BOOST_CHECK(!backends.empty());

    ProfilerManager& profilerManager = armnn::ProfilerManager::GetInstance();

    // Create the runtime in which the test will run
    IRuntime::CreationOptions options;
    options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
    IRuntimePtr runtime(IRuntime::Create(options));

    // build up the structure of the network
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");
    SoftmaxDescriptor softmaxDescriptor;
    // Set the axis to -1 if CL or Neon until further axes are supported.
    if (backends.front() == armnn::Compute::CpuAcc || backends.front() == armnn::Compute::GpuAcc)
    {
        softmaxDescriptor.m_Axis = -1;
    }
    IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDescriptor, "softmax");
    IConnectableLayer* output  = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // set the tensors in the network
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
    if (!optNet)
    {
        BOOST_FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
    }
    // load it into the runtime
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    BOOST_TEST(error == Status::Success);

    // create structures for input & output
    std::vector<uint8_t> inputData
        {
            1, 10, 3, 200, 5
            // one of the inputs is sufficiently larger than the others to saturate the softmax
        };
    std::vector<uint8_t> outputData(5);

    armnn::InputTensors inputTensors
        {
            {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
        };
    armnn::OutputTensors outputTensors
        {
            {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
        };

    runtime->GetProfiler(netId)->EnableProfiling(true);

    // do the inferences
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // retrieve the Profiler.Print() output
    std::stringstream ss;
    profilerManager.GetProfiler()->Print(ss);

    return ss.str();
}

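// Validates the profiler JSON: measurements can be extracted, braces and brackets are balanced,
// every leaf section carries "raw" and "unit" tags, and the expected top-level tags are present
// once the numeric values have been stripped out.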
inline void ValidateProfilerJson(std::string& result)
{
    // ensure measurements were extracted from the JSON output
    std::vector<double> measurementsVector = ExtractMeasurements(result);
    BOOST_CHECK(!measurementsVector.empty());

    // check sections contain raw and unit tags
    // first ensure the parentheses are balanced
    if (AreParenthesesMatching(result))
    {
        // remove parent sections that will not have a raw or unit tag
        std::vector<std::string> sectionVector = ExtractSections(result);
        for (size_t i = 0; i < sectionVector.size();)
        {
            if (sectionVector[i].find("\"ArmNN\":") != std::string::npos
                || sectionVector[i].find("\"inference_measurements\":") != std::string::npos)
            {
                sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
            }
            else
            {
                // only advance when nothing was erased, so the element that shifts
                // into position i is not skipped
                ++i;
            }
        }
        BOOST_CHECK(!sectionVector.empty());

        BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](const std::string& i) { return (i.find("\"raw\":") != std::string::npos); }));

        BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](const std::string& i) { return (i.find("\"unit\":") != std::string::npos); }));
    }

    // remove the time measurements as they vary from test to test
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '.'; }), result.end());
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return std::isdigit(static_cast<unsigned char>(c)) != 0; }), result.end());
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '\t'; }), result.end());

    BOOST_CHECK(result.find("ArmNN") != std::string::npos);
    BOOST_CHECK(result.find("inference_measurements") != std::string::npos);

    // ensure no stray parentheses are present in the print output
    BOOST_CHECK(AreParenthesesMatching(result));
}

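// Test entry point: generates the softmax profiler JSON for the given backends, validates its
// structure, and checks for the backend-specific kernel timer entries (OpenCL or Neon).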
void RunSoftmaxProfilerJsonPrinterTest(const std::vector<armnn::BackendId>& backends)
{
    // set up the test fixture and obtain the JSON Printer result
    std::string result = GetSoftmaxProfilerJson(backends);

    // validate the JSON Printer result
    ValidateProfilerJson(result);

    const armnn::BackendId& firstBackend = backends.at(0);
    if (firstBackend == armnn::Compute::GpuAcc)
    {
        BOOST_CHECK(result.find("OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
                    != std::string::npos);
    }
    else if (firstBackend == armnn::Compute::CpuAcc)
    {
        BOOST_CHECK(result.find("NeonKernelTimer/: NEFillBorderKernel") != std::string::npos);
    }
}