1 //
2 // Copyright © 2017, 2019-2023 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5
6 #include <Graph.hpp>
7 #include <SubgraphViewSelector.hpp>
8
9 #include <armnn/backends/OptimizationViews.hpp>
10 #include <armnn/backends/SubgraphView.hpp>
11 #include <armnn/backends/TensorHandle.hpp>
12 #include <armnn/utility/NumericCast.hpp>
13
14 #include <doctest/doctest.h>
15
16 #include <fstream>
17 #include <map>
18 #include <queue>
19 #include <random>
20 #include <chrono>
21 #include <numeric>
22
23 using namespace armnn;
24
25 namespace
26 {
27
AreAnySubgraphLayersPresentInGraph(const SubgraphView::IConnectableLayers & subgraphLayers,const Graph & graph)28 bool AreAnySubgraphLayersPresentInGraph(const SubgraphView::IConnectableLayers &subgraphLayers, const Graph &graph)
29 {
30 for(auto&& layer : subgraphLayers)
31 {
32 auto posInGraph = std::find(graph.begin(), graph.end(), layer);
33 if(posInGraph != graph.end())
34 {
35 return true;
36 }
37 }
38
39 return false;
40 }
41
42 //
43 // this helper only works if all layers where the inputs connect to are not selected
44 //
CreateInputsFrom(const std::vector<Layer * > & layers,std::vector<int> ignoreSlots={})45 SubgraphView::InputSlots CreateInputsFrom(const std::vector<Layer*>& layers,
46 std::vector<int> ignoreSlots = {})
47 {
48 SubgraphView::InputSlots result;
49 for (auto&& layer : layers)
50 {
51 for (auto&& it = layer->BeginInputSlots(); it != layer->EndInputSlots(); ++it)
52 {
53 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), it->GetSlotIndex()) != ignoreSlots.end())
54 {
55 continue;
56 }
57 else
58 {
59 result.push_back(&(*it));
60 }
61 }
62 }
63 return result;
64 }
65
66 /// Duplication for IConnectableLayer
CreateIInputsFrom(const std::vector<armnn::IConnectableLayer * > & layers,std::vector<int> ignoreSlots={})67 SubgraphView::IInputSlots CreateIInputsFrom(const std::vector<armnn::IConnectableLayer*>& layers,
68 std::vector<int> ignoreSlots = {})
69 {
70 SubgraphView::IInputSlots result;
71 for (auto&& layer: layers)
72 {
73 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
74 {
75 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) != ignoreSlots.end())
76 {
77 continue;
78 }
79 else
80 {
81 result.push_back(&(layer->GetInputSlot(i)));
82 }
83 }
84 }
85 return result;
86 }
87
88 //
89 // this helper only works if all layers where the outputs connect to are not selected
90 //
CreateOutputsFrom(const std::vector<Layer * > & layers)91 SubgraphView::OutputSlots CreateOutputsFrom(const std::vector<Layer*>& layers)
92 {
93 SubgraphView::OutputSlots result;
94 for (auto && layer : layers)
95 {
96 for (auto&& it = layer->BeginOutputSlots(); it != layer->EndOutputSlots(); ++it)
97 {
98 result.push_back(&(*it));
99 }
100 }
101 return result;
102 }
103
104 /// Duplication for IConnectableLayer
CreateIOutputsFrom(const std::vector<armnn::IConnectableLayer * > & layers)105 SubgraphView::IOutputSlots CreateIOutputsFrom(const std::vector<armnn::IConnectableLayer*>& layers)
106 {
107 SubgraphView::IOutputSlots result;
108 for (auto&& layer: layers)
109 {
110 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
111 {
112 result.push_back(&(layer->GetOutputSlot(i)));
113 }
114 }
115 return result;
116 }
117
118 //
119 // this takes the inputs, outputs and layers as a copy and the move these copies into the
120 // resulting subgraph, so the pass by value is intentional
121 //
CreateSubgraphViewFrom(SubgraphView::InputSlots && inputs,SubgraphView::OutputSlots && outputs,SubgraphView::Layers && layers)122 SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::InputSlots&& inputs,
123 SubgraphView::OutputSlots&& outputs,
124 SubgraphView::Layers&& layers)
125 {
126 return std::make_unique<SubgraphView>(std::move(inputs), std::move(outputs), std::move(layers));
127 }
128
CreateSubgraphViewFrom(SubgraphView::IConnectableLayers && layers,SubgraphView::IInputSlots && inputs,SubgraphView::IOutputSlots && outputs)129 SubgraphView::SubgraphViewPtr CreateSubgraphViewFrom(SubgraphView::IConnectableLayers&& layers,
130 SubgraphView::IInputSlots&& inputs,
131 SubgraphView::IOutputSlots&& outputs)
132 {
133 return std::make_unique<SubgraphView>(std::move(layers), std::move(inputs), std::move(outputs));
134 }
135
/// Copies the range [begin, end) into a vector and returns it sorted ascending,
/// so that pointer collections can be compared order-insensitively.
template <typename T, typename Iterator>
std::vector<T> ToSortedArray(Iterator begin, Iterator end)
{
    std::vector<T> sorted;
    sorted.assign(begin, end);
    std::sort(sorted.begin(), sorted.end());
    return sorted;
}
143
/// Checks that two vectors are element-wise equal. The four-iterator form of
/// std::equal also fails when the lengths differ.
template <typename T>
void CompareVectors(const std::vector<T>& result, const std::vector<T>& expected)
{
    CHECK(std::equal(result.begin(), result.end(), expected.begin(), expected.end()));
}
149
/// Checks that two sub-graph views are equivalent: same slot/layer counts and the
/// same sets of layer and slot pointers. Ordering differences between the two views
/// are tolerated because the pointer collections are sorted before comparison.
void CompareSubgraphViews(SubgraphView::SubgraphViewPtr& result,
                          SubgraphView::SubgraphViewPtr& expected)
{
    // expect both to be valid subgraphs
    CHECK((result.get() != nullptr));
    CHECK((expected.get() != nullptr));

    if (result.get() != nullptr && expected.get() != nullptr)
    {
        CHECK(result->GetIInputSlots().size() == expected->GetIInputSlots().size());
        CHECK(result->GetIOutputSlots().size() == expected->GetIOutputSlots().size());
        CHECK(result->GetIConnectableLayers().size() == expected->GetIConnectableLayers().size());

        // Sort the layer pointers so the comparison is independent of traversal order.
        auto resultLayers = ToSortedArray<IConnectableLayer*>(result->GetIConnectableLayers().begin(),
                                                              result->GetIConnectableLayers().end());
        auto expectedLayers = ToSortedArray<IConnectableLayer*>(expected->GetIConnectableLayers().begin(),
                                                                expected->GetIConnectableLayers().end());
        CompareVectors(resultLayers, expectedLayers);

        // Same for input slots...
        auto resultInputs = ToSortedArray<IInputSlot *>(result->GetIInputSlots().begin(),
                                                        result->GetIInputSlots().end());
        auto expectedInputs = ToSortedArray<IInputSlot *>(expected->GetIInputSlots().begin(),
                                                          expected->GetIInputSlots().end());
        CompareVectors(resultInputs, expectedInputs);

        // ...and output slots.
        auto resultOutputs = ToSortedArray<IOutputSlot *>(result->GetIOutputSlots().begin(),
                                                          result->GetIOutputSlots().end());
        auto expectedOutputs = ToSortedArray<IOutputSlot *>(expected->GetIOutputSlots().begin(),
                                                            expected->GetIOutputSlots().end());
        CompareVectors(resultOutputs, expectedOutputs);
    }
}
182
183 } // namespace <anonymous>
184
185 TEST_SUITE("SubgraphViewBackwardCompatibilityTests")
186 {
// Test that SubgraphView has been converted to using IConnectableLayer/IInputSlot/IOutputSlot
// in a backward compatible manner from ILayer/InputSlot/OutputSlot
TEST_CASE("SubgraphViewIterators")
{
    INetworkPtr net(INetwork::Create());
    IConnectableLayer* layer = net->AddInputLayer(1, "input");

    // Single-layer view over the input layer.
    SubgraphView subgraph{layer};

    // cbeginIConnectable() and cendIConnectable(): the layer must be findable
    // through the const iterator pair.
    bool found = false;
    if (std::find(subgraph.cbeginIConnectable(), subgraph.cendIConnectable(), layer)
        != subgraph.cendIConnectable())
    {
        found = true;
    }
    CHECK(found);
    found = false;

    // beginIConnectable() and endIConnectable(): same via the mutable iterator pair.
    if (std::find(subgraph.beginIConnectable(), subgraph.endIConnectable(), layer)
        != subgraph.endIConnectable())
    {
        found = true;
    }
    CHECK(found);
    found = false;

    // GetIConnectableLayers returns IConnectableLayers initialized when calling constructor given IConnectableLayers
    const SubgraphView::IConnectableLayers& subgraphLayers = subgraph.GetIConnectableLayers();
    for (auto& iConnectableLayer : subgraphLayers)
    {
        if (std::string(iConnectableLayer->GetName()) == "input")
        {
            found = true;
        }
    }
    CHECK(found);
    found = false;

    // Test that the deprecated GetLayers returns layers initialized when calling
    // the constructor given IConnectableLayers.
    ARMNN_NO_DEPRECATE_WARN_BEGIN
    const SubgraphView::Layers& subgraphLayersOld = subgraph.GetLayers();
    ARMNN_NO_DEPRECATE_WARN_END
    for (auto& layerOld : subgraphLayersOld)
    {
        if (std::string(layerOld->GetName()) == "input")
        {
            found = true;
        }
    }
    CHECK(found);
}
240
// Checks that the IInputSlot/IOutputSlot accessors and their deprecated
// InputSlot/OutputSlot counterparts expose the same slots.
TEST_CASE("SubgraphViewSlots")
{
    // Construct graph: input -> conv1 -> conv2 -> output
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph; conv1's slot indices 1 and 2 are excluded, leaving a
    // single input slot and a single output slot in the view.
    SubgraphView::SubgraphViewPtr subgraph = CreateSubgraphViewFrom({},
                                                                    CreateIInputsFrom({convLayer1}, {1, 2}),
                                                                    CreateIOutputsFrom({convLayer2}));

    // Test that both old and new are initialized
    CHECK(subgraph->GetIInputSlots().size() == 1);
    CHECK(subgraph->GetIOutputSlots().size() == 1);

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    CHECK(subgraph->GetInputSlots().size() == 1);
    CHECK(subgraph->GetOutputSlots().size() == 1);

    // Check old and new pointing to same address
    CHECK(subgraph->GetOutputSlot(0) == subgraph->GetIOutputSlot(0));
    CHECK(subgraph->GetInputSlot(0) == subgraph->GetIInputSlot(0));
    ARMNN_NO_DEPRECATE_WARN_END

}
277
// Exercises the copy constructor, move constructor and Clear() of SubgraphView,
// comparing both the new IConnectable accessors and the deprecated ones.
TEST_CASE("SubgraphViewConstructors")
{
    // Construct graph: input -> conv1 -> conv2 -> output
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph covering all four layers.
    SubgraphView::SubgraphViewPtr subgraph =
            CreateSubgraphViewFrom({inputLayer, convLayer1, convLayer2, outputLayer},
                                   CreateIInputsFrom({convLayer1}),
                                   CreateIOutputsFrom({convLayer2}));

    // Copy Constructor: the copy must expose the same layers and slots.
    SubgraphView subgraph2(*subgraph.get());
    CHECK(subgraph->GetIConnectableLayers() == subgraph2.GetIConnectableLayers());
    CHECK(subgraph->GetIInputSlots() == subgraph2.GetIInputSlots());
    CHECK(subgraph->GetIOutputSlots() == subgraph2.GetIOutputSlots());

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    CHECK(subgraph->GetLayers() == subgraph2.GetLayers());
    CHECK(subgraph->GetInputSlots() == subgraph2.GetInputSlots());
    CHECK(subgraph->GetOutputSlots() == subgraph2.GetOutputSlots());
    ARMNN_NO_DEPRECATE_WARN_END

    // Move Constructor: subgraph2 is moved-from after this and no longer used.
    SubgraphView subgraph3(std::move(subgraph2));
    CHECK(subgraph->GetIConnectableLayers() == subgraph3.GetIConnectableLayers());
    CHECK(subgraph->GetIInputSlots() == subgraph3.GetIInputSlots());
    CHECK(subgraph->GetIOutputSlots() == subgraph3.GetIOutputSlots());

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    CHECK(subgraph->GetLayers() == subgraph3.GetLayers());
    CHECK(subgraph->GetInputSlots() == subgraph3.GetInputSlots());
    CHECK(subgraph->GetOutputSlots() == subgraph3.GetOutputSlots());
    ARMNN_NO_DEPRECATE_WARN_END

    // Clear empties all three collections.
    subgraph.get()->Clear();
    CHECK(subgraph->GetIConnectableLayers().size() == 0);
    CHECK(subgraph->GetIInputSlots().size() == 0);
    CHECK(subgraph->GetIOutputSlots().size() == 0);
}
331
332 } // SubgraphViewBackwardCompatibilityTests Test Suite end
333
334 TEST_SUITE("SubgraphSubstitution")
335 {
// Substitutes a single-input/single-output sub-graph with a pre-compiled layer and
// checks the surrounding connections are rewired onto that layer.
TEST_CASE("SingleInputSingleOutput")
{
    // Construct graph: input -> conv1 -> conv2 -> output, with constant weights
    // feeding slot 1 of each convolution.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
    Layer* const weightsLayer1 = graph.AddLayer<ConstantLayer>("weights1");
    Layer* const weightsLayer2 = graph.AddLayer<ConstantLayer>("weights2");
    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    weightsLayer1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    weightsLayer2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph; conv1's weights slot (index 1) is ignored, so the view
    // has exactly one input and one output.
    SubgraphView::SubgraphViewPtr subgraph =
            CreateSubgraphViewFrom({},
                                   CreateIInputsFrom({convLayer1}, {1}),
                                   CreateIOutputsFrom({convLayer2}));

    // Save sub-graph connections for comparison after substitution
    // Using GetIInputSlot/GetIIOutputSlot functions
    IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
    IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);

    // Construct dummy pre-compiled layer
    PreCompiledDescriptor preCompiledDescriptor(1, 1);

    IConnectableLayer* const preCompiledLayer =
            graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
}
380
// Same as SingleInputSingleOutput, but the pre-compiled layer is created through
// INetwork::AddPrecompiledLayer (in a separate network) rather than Graph::AddLayer.
TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph1")
{
    // Construct graph: input -> conv1 -> conv2 -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph, ignoring slot 1 of conv1.
    SubgraphView::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}, {1}),
                                                                    CreateOutputsFrom({convLayer2}),
                                                                    {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
    IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);

    PreCompiledDescriptor preCompiledDescriptor(1, 1);
    CompiledBlobPtr compiledBlobPtr;    // empty blob is sufficient for a dummy layer
    BackendId backend = Compute::CpuRef;

    // Construct dummy pre-compiled layer owned by a separate network.
    INetworkPtr network = INetwork::Create();
    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
                                                                       std::move(compiledBlobPtr),
                                                                       backend);

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
}
424
// Variant of the previous test: the substitution target is passed as a SubgraphView
// wrapping the pre-compiled layer instead of as a bare layer pointer.
TEST_CASE("SingleInputSingleOutputAddPrecompiledLayerSubstituteSubgraph2")
{
    // Construct graph: input -> conv1 -> conv2 -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph, ignoring slot 1 of conv1.
    SubgraphView::SubgraphViewPtr subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}, {1}),
                                                                    CreateOutputsFrom({convLayer2}),
                                                                    {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
    IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);

    PreCompiledDescriptor preCompiledDescriptor(1, 1);
    CompiledBlobPtr compiledBlobPtr;    // empty blob is sufficient for a dummy layer
    BackendId backend = Compute::CpuRef;

    // Construct dummy pre-compiled layer owned by a separate network.
    INetworkPtr network = INetwork::Create();
    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
                                                                       std::move(compiledBlobPtr),
                                                                       backend);
    SubgraphView substituteSubgraph(preCompiledLayer);

    // Substitute sub-graph with the single-layer substitute view.
    graph.SubstituteSubgraph(*subgraph, substituteSubgraph);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
}
469
// Substitutes a sub-graph with a sub-graph view built over a *second* graph that
// contains a single pre-compiled layer.
TEST_CASE("SingleInputSingleOutputSubstituteGraph")
{
    // Construct graph: input -> conv1 -> conv2 -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph, ignoring slot 1 of conv1.
    SubgraphView::SubgraphViewPtr subgraph =
            CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}, {1}),
                                   CreateOutputsFrom({convLayer2}),
                                   {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn = subgraph->GetIInputSlot(0)->GetConnection();
    IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);

    // Construct second graph with a single pre-compiled layer
    Graph substituteGraph;
    PreCompiledDescriptor preCompiledDescriptor(1, 1);
    Layer* const preCompiledLayer = substituteGraph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    SubgraphView::SubgraphViewPtr substituteSubgraph =
            CreateSubgraphViewFrom(CreateInputsFrom({preCompiledLayer}),
                                   CreateOutputsFrom({preCompiledLayer}),
                                   {preCompiledLayer});
    // Substitute subgraph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, *substituteSubgraph);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
}
513
// Substitutes a two-input/one-output sub-graph (both convolutions plus the concat)
// with a pre-compiled layer and checks both inputs and the output are rewired.
TEST_CASE("MultiInputSingleOutput")
{
    // Construct graph: input -> splitter -> {conv1, conv2} -> concat -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    ViewsDescriptor splitterDescriptor(2);
    Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    OriginsDescriptor concatDescriptor(2);
    Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
    concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph; slot 1 is ignored on both convolutions.
    auto subgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}, {1}),
                                           CreateOutputsFrom({concatLayer}),
                                           {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
    IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();

    IInputSlot* subgraphOutputConn = subgraph->GetIOutputSlot(0)->GetConnection(0);

    // Construct dummy pre-compiled layer with 2 inputs and 1 output.
    PreCompiledDescriptor preCompiledDescriptor(2, 1);
    Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
    CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);

    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn);
}
564
// Substitutes a one-input/two-output sub-graph (splitter plus both convolutions)
// with a pre-compiled layer and checks the input and both outputs are rewired.
TEST_CASE("SingleInputMultiOutput")
{
    // Construct graph: input -> splitter -> {conv1, conv2} -> concat -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
    OriginsDescriptor concatDescriptor(2);
    Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    ViewsDescriptor splitterDescriptor(2);
    Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
    concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph: inputs from the splitter, outputs from both convolutions.
    SubgraphView::SubgraphViewPtr subgraph =
            CreateSubgraphViewFrom(CreateInputsFrom({splitterLayer}),
                                   CreateOutputsFrom({convLayer1, convLayer2}),
                                   {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();

    IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
    IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);

    // Construct dummy pre-compiled layer with 1 input and 2 outputs.
    PreCompiledDescriptor preCompiledDescriptor(1, 2);
    Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);

    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
}
614
// Substitutes a two-input/two-output sub-graph (just the two convolutions) with a
// pre-compiled layer and checks that all four boundary connections are rewired.
TEST_CASE("MultiInputMultiOutput")
{
    // Construct graph: input -> splitter -> {conv1, conv2} -> concat -> output.
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    ViewsDescriptor splitterDescriptor(2);
    Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");

    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    OriginsDescriptor concatDescriptor(2);
    Layer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
    convLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
    convLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
    concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // Construct sub-graph; slot 1 is ignored on both convolutions.
    SubgraphView::SubgraphViewPtr subgraph =
            CreateSubgraphViewFrom(CreateInputsFrom({convLayer1, convLayer2}, {1}),
                                   CreateOutputsFrom({convLayer1, convLayer2}),
                                   {});

    // Save sub-graph connections for comparison after substitution
    IOutputSlot* subgraphInputConn1 = subgraph->GetIInputSlot(0)->GetConnection();
    IOutputSlot* subgraphInputConn2 = subgraph->GetIInputSlot(1)->GetConnection();

    IInputSlot* subgraphOutputConn1 = subgraph->GetIOutputSlot(0)->GetConnection(0);
    IInputSlot* subgraphOutputConn2 = subgraph->GetIOutputSlot(1)->GetConnection(0);

    // Construct dummy pre-compiled layer with 2 inputs and 2 outputs.
    PreCompiledDescriptor preCompiledDescriptor(2, 2);
    Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that connections are correct after substitution
    CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
    CHECK_EQ(preCompiledLayer->GetInputSlot(1).GetConnection(), subgraphInputConn2);

    CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
    CHECK_EQ(preCompiledLayer->GetOutputSlot(1).GetConnection(0), subgraphOutputConn2);
}
668
// Checks that the layers belonging to a substituted sub-graph are erased from the
// graph after SubstituteSubgraph.
TEST_CASE("EraseReplacedIConnectableLayers")
{
    // Construct graph. The layers are deliberately left unconnected: only
    // membership in the graph is under test here.
    Graph graph;

    graph.AddLayer<InputLayer>(0, "input");

    ViewsDescriptor splitterDescriptor(2);
    IConnectableLayer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");

    Convolution2dDescriptor convDescriptor;
    IConnectableLayer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    IConnectableLayer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    OriginsDescriptor concatDescriptor(2);
    IConnectableLayer* const concatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");

    graph.AddLayer<OutputLayer>(0, "output");

    // Construct sub-graph containing the four middle layers, with no boundary slots.
    SubgraphView::SubgraphViewPtr subgraph = CreateSubgraphViewFrom({splitterLayer,
                                                                     convLayer1,
                                                                     convLayer2,
                                                                     concatLayer},
                                                                    {},
                                                                    {});

    // Construct dummy pre-compiled layer with no inputs or outputs.
    PreCompiledDescriptor preCompiledDescriptor(0, 0);
    Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");

    // Save sub-graph layers for later verification
    const SubgraphView::IConnectableLayers subgraphLayers = subgraph->GetIConnectableLayers();

    // Substitute sub-graph with pre-compiled layer
    graph.SubstituteSubgraph(*subgraph, preCompiledLayer);

    // Check that the layers belonging to the sub-graph have been erased from the graph after substitution
    CHECK(!AreAnySubgraphLayersPresentInGraph(subgraphLayers, graph));
}
709
710 }
711
712 TEST_SUITE("SubgraphSelection")
713 {
714 TEST_CASE("SubgraphForEmptyGraph")
715 {
716 Graph graph;
717 SubgraphView subgraph(graph);
718
719 CHECK(subgraph.GetIInputSlots().empty());
720 CHECK(subgraph.GetIOutputSlots().empty());
721 CHECK(subgraph.GetIConnectableLayers().empty());
722 }
723
// A view over a whole graph (input -> mid1 -> mid0 -> output) contains every layer
// and has no boundary slots, since nothing crosses the view's border.
TEST_CASE("SubgraphForEntireGraph")
{
    Graph graph;

    // The graph is built back-to-front: each InsertNewLayer call splices a new
    // layer in front of the given input slot.
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
                                                      ActivationDescriptor{},
                                                      "mid0");
    auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
                                                      ActivationDescriptor{},
                                                      "mid1");
    graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");

    SubgraphView subgraph(graph);

    CHECK(subgraph.GetIInputSlots().empty());
    CHECK(subgraph.GetIOutputSlots().empty());
    CHECK(subgraph.GetIConnectableLayers().size() == graph.GetNumLayers());
}
743
// A selector that matches nothing must produce no sub-graphs.
TEST_CASE("NoSubgraphsForNoMatch")
{
    Graph graph;

    auto output = graph.AddLayer<OutputLayer>(0, "output");
    graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");

    SubgraphViewSelector::Subgraphs subgraphs =
            SubgraphViewSelector::SelectSubgraphs(graph, [](const Layer &) { return false; });

    CHECK(subgraphs.empty());
}
756
// Selecting a single layer yields exactly one sub-graph containing just that layer.
TEST_CASE("OneSubgraphsSelectedASingleMatch")
{
    Graph graph;

    auto output = graph.AddLayer<OutputLayer>(0, "output");
    graph.InsertNewLayer<InputLayer>(output->GetInputSlot(0), 0, "input");

    SubgraphViewSelector::Subgraphs subgraphs =
            SubgraphViewSelector::SelectSubgraphs(
                graph,
                // select the output layer only
                [](const Layer & l)
                {
                    bool isOutput = l.GetNameStr().compare("output") == 0;
                    return isOutput;
                });

    CHECK(subgraphs.size() == 1);
    if (subgraphs.size() == 1)
    {
        auto expected = CreateSubgraphViewFrom(CreateInputsFrom({output}),
                                               // outputs of 'output' will be empty
                                               CreateOutputsFrom({output}),
                                               {output});

        CompareSubgraphViews(subgraphs[0], expected);
    }
}
785
// Selecting the two adjacent middle layers of input -> mid1 -> mid0 -> output
// merges them into one sub-graph with one input and one output.
TEST_CASE("MultipleLayersSelectedInTheMiddle")
{
    Graph graph;

    // Built back-to-front via InsertNewLayer, as in SubgraphForEntireGraph.
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    auto mid0 = graph.InsertNewLayer<ActivationLayer>(output->GetInputSlot(0),
                                                      ActivationDescriptor{},
                                                      "mid0");
    auto mid1 = graph.InsertNewLayer<ActivationLayer>(mid0->GetInputSlot(0),
                                                      ActivationDescriptor{},
                                                      "mid1");
    graph.InsertNewLayer<InputLayer>(mid1->GetInputSlot(0), 0, "input");

    SubgraphViewSelector::Subgraphs subgraphs =
            SubgraphViewSelector::SelectSubgraphs(
                graph,
                // select the middle layers only
                [](const Layer & l)
                {
                    bool toSelect = (l.GetType() == LayerType::Activation);
                    return toSelect;
                });

    CHECK(subgraphs.size() == 1);
    if (subgraphs.size() == 1)
    {
        auto expected = CreateSubgraphViewFrom(CreateInputsFrom({mid1}),
                                               CreateOutputsFrom({mid0}),
                                               {mid1, mid0});

        CompareSubgraphViews(subgraphs[0], expected);
    }
}
819
TEST_CASE("DisjointGraphs")
{
    // The input graph has two disjoint sections and all layers are selected.
    // This should result in two subgraphs being produced.
    Graph graph;

    // the graph is constructed in reverse order
    auto o0 = graph.AddLayer<OutputLayer>(0, "output0");
    auto n0 = graph.InsertNewLayer<ActivationLayer>(o0->GetInputSlot(0), ActivationDescriptor{}, "intermediate0");
    auto i0 = graph.InsertNewLayer<InputLayer>(n0->GetInputSlot(0), 0, "input0");

    auto o1 = graph.AddLayer<OutputLayer>(1, "output1");
    auto n1 = graph.InsertNewLayer<ActivationLayer>(o1->GetInputSlot(0), ActivationDescriptor{}, "intermediate1");
    auto i1 = graph.InsertNewLayer<InputLayer>(n1->GetInputSlot(0), 1, "input1");

    SubgraphViewSelector::Subgraphs subgraphs =
            SubgraphViewSelector::SelectSubgraphs(graph,
                                                  // select all layers
                                                  [](const Layer&) {
                                                      return true;
                                                  });

    // expected results to test against: one sub-graph per disjoint section,
    // each covering a full input-to-output chain (hence no boundary slots)
    auto expected1 = CreateSubgraphViewFrom({}, {}, { o0, n0, i0 });
    auto expected2 = CreateSubgraphViewFrom({}, {}, { o1, n1, i1 });
    CHECK(subgraphs.size() == 2);
    if (subgraphs.size() == 2)
    {
        CHECK((subgraphs[0] != nullptr));
        CHECK((subgraphs[1] != nullptr));
        CompareSubgraphViews(subgraphs[0], expected1);
        CompareSubgraphViews(subgraphs[1], expected2);
    }
}
854
855 TEST_CASE("IslandInTheMiddle")
856 {
857 // This case represent the scenario when a non-selected X1 node placed in the middle
858 // of the selected M* nodes.
859 // This checks that we don't merge M6 and M3 and create a dependency loop.
860 /*
861 M0
862 / \
863 M1 M4
864 | |
865 M2 X1 < the island in the middle !
866 | |
867 M3 M5
868 \ /
869 M6
870 */
871 Graph graph;
872
873 OriginsDescriptor concatDescriptor(2);
874 auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
875 auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
876 ActivationDescriptor{},
877 "m3");
878 auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
879 ActivationDescriptor{},
880 "m2");
881 auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
882 ActivationDescriptor{},
883 "m1");
884 auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
885
886 auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
887 ActivationDescriptor{},
888 "m5");
889 auto x1 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0),
890 ActivationDescriptor{},
891 "x1");
892 auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
893 ActivationDescriptor{},
894 "m4");
895
896 // Connect the other branch to the input layer
897 m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
898
899 // All selected 'M*' layers will be of Activation type
900 SubgraphViewSelector::Subgraphs subgraphs =
901 SubgraphViewSelector::SelectSubgraphs(
902 graph,
903 // select the middle layers only
904 [](const Layer& l)
__anonc2781dcd0602(const Layer& l) 905 {
906 bool toSelect = std::string(l.GetName())[0] == 'm';
907 return toSelect;
908 });
909
910 // expected results to test against
911 auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
912 CreateOutputsFrom({ m3, m4 }),
913 { m0, m1, m2, m3, m4 });
914
915 auto smallerSubgraph =
916 CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), & m6->GetInputSlot(0) },
917 std::vector<OutputSlot*>{},
918 { m5, m6 });
919
920 CHECK(subgraphs.size() == 2);
921 if (subgraphs.size() == 2)
922 {
923 // we need to have valid subgraph pointers here
924 CHECK((subgraphs[0] != nullptr));
925 CHECK((subgraphs[1] != nullptr));
926 CompareSubgraphViews(subgraphs[0], largerSubgraph);
927 CompareSubgraphViews(subgraphs[1], smallerSubgraph);
928 }
929 }
930
931 TEST_CASE("MultipleSimpleSubgraphs")
932 {
933 // This test case represents the scenario when we have two distinct subgraphs
934 // in a simple linear network. The selected nodes are the M* and the
935 // non-selected ones are the X*
936 // W2 ->->
937 // |
938 // X1 -> M1 -> M2 -> X2 -> M3 -> X3
939 //
940 // The expected results is two subgraphs, one with {M1, M2} and another one
941 // with {M3}
942 //
943 Graph graph;
944
945 // the graph is constructed in reverse order
946 auto x3 = graph.AddLayer<OutputLayer>(0, "output");
947
948 auto m3 = graph.InsertNewLayer<ActivationLayer>(x3->GetInputSlot(0),
949 ActivationDescriptor{},
950 "m3");
951
952 auto x2 = graph.InsertNewLayer<Convolution2dLayer>(m3->GetInputSlot(0),
953 Convolution2dDescriptor{},
954 "x2");
955
956 auto w2 = graph.InsertNewLayer<ConstantLayer>(x2->GetInputSlot(1), "w2");
957
958 auto m2 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0),
959 ActivationDescriptor{},
960 "m2");
961 auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
962 ActivationDescriptor{},
963 "m1");
964 graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "x1");
965
966 IgnoreUnused(w2);
967 // All selected 'M*' layers will be of Activation type
968 SubgraphViewSelector::Subgraphs subgraphs =
969 SubgraphViewSelector::SelectSubgraphs(
970 graph,
971 // select the middle layers only
972 [](const Layer & l)
__anonc2781dcd0702(const Layer & l) 973 {
974 bool toSelect = (l.GetType() == LayerType::Activation);
975 return toSelect;
976 });
977
978 // expected results to test against
979 auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m1}),
980 CreateOutputsFrom({m2}),
981 {m1, m2});
982
983 auto smallerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m3}),
984 CreateOutputsFrom({m3}),
985 {m3});
986
987 CHECK(subgraphs.size() == 2);
988 if (subgraphs.size() == 2)
989 {
990 // we need to have valid subgraph pointers here
991 CHECK((subgraphs[0] != nullptr));
992 CHECK((subgraphs[1] != nullptr));
993 CompareSubgraphViews(subgraphs[0], smallerSubgraph);
994 CompareSubgraphViews(subgraphs[1], largerSubgraph);
995 }
996 }
997
998 TEST_CASE("SimpleLinearTest")
999 {
1000 //X1 -> M1 -> M2 -> X2
1001 //Where the input slots of M1 and the output slots of M2 are to be the sub graph boundaries.
1002 Graph graph;
1003
1004 ActivationDescriptor activationDefaults;
1005
1006 auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1007 auto layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1008 auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1009 auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1010
1011 // X1
1012 // |
1013 // M1
1014 // |
1015 // M2
1016 // |
1017 // X2
1018
1019 layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1020 layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1021 layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1022
1023 SubgraphViewSelector::Subgraphs subgraphs =
1024 SubgraphViewSelector::SelectSubgraphs(
1025 graph,
1026 // select the activation layers M1 and M2
1027 [](const Layer & l)
__anonc2781dcd0802(const Layer & l) 1028 {
1029 bool toSelect = (l.GetType() == LayerType::Activation);
1030 return toSelect;
1031 });
1032
1033 CHECK(subgraphs.size() == 1);
1034 if(subgraphs.size() == 1)
1035 {
1036 auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1037 CreateOutputsFrom({layerM2}),
1038 {layerM1, layerM2});
1039
1040 CompareSubgraphViews(subgraphs[0], expected);
1041 }
1042 }
1043
1044 TEST_CASE("MultiInputSingleOutput")
1045 {
1046 //X1 -> M1 -> M3 -> X3
1047 //X2 -> M2 -> M3 -> X3
1048 //Where the input slots of {M1, M2} and the output slots of M3 are to be the subgraph boundaries.
1049 Graph graph;
1050
1051 ActivationDescriptor activationDefaults;
1052
1053 auto layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1054 auto layerX2 = graph.AddLayer<InputLayer>(1, "layerX2");
1055 auto layerM1 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM1");
1056 auto layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1057 auto layerM3 = graph.AddLayer<AdditionLayer>("layerM3");
1058 auto layerX3 = graph.AddLayer<OutputLayer>(0, "layerX3");
1059
1060 // X1 X2
1061 // | |
1062 // M1 M2
1063 // \ |
1064 // \ |
1065 // \|
1066 // M3
1067 // |
1068 // |
1069 // X3
1070
1071 layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1072 layerX2->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1073 layerM1->GetOutputSlot(0).Connect(layerM3->GetInputSlot(0));
1074 layerM2->GetOutputSlot(0).Connect(layerM3->GetInputSlot(1));
1075 layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1076
1077 SubgraphViewSelector::Subgraphs subgraphs =
1078 SubgraphViewSelector::SelectSubgraphs(
1079 graph,
1080 // select Activation and Addition Layers M1, M2 and M3
1081 [](const Layer & l)
__anonc2781dcd0902(const Layer & l) 1082 {
1083 bool toSelect = (l.GetType() == LayerType::Activation
1084 || l.GetType() == LayerType::Addition);
1085 return toSelect;
1086 });
1087
1088 CHECK(subgraphs.size() == 1);
1089 if (subgraphs.size() == 1)
1090 {
1091 auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1, layerM2}),
1092 CreateOutputsFrom({layerM3}),
1093 {layerM1, layerM2, layerM3});
1094
1095 CompareSubgraphViews(subgraphs[0], expected);
1096 }
1097 }
1098
1099 TEST_CASE("SingleInputMultiOutput")
1100 {
1101 //X1 -> M1 -> M2 -> X2
1102 //X1 -> M1 -> M3 -> X3
1103 //Where the input slots of M1 and the output slots of {M2, M3} are to be the subgraph boundaries.
1104 Graph graph;
1105
1106 ActivationDescriptor activationDefaults;
1107 ViewsDescriptor viewDefaults(2,4);
1108
1109 Layer* layerX1 = graph.AddLayer<InputLayer>(0, "layerX1");
1110 Layer* layerM1 = graph.AddLayer<SplitterLayer>(viewDefaults, "layerM1");
1111 Layer* layerM2 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM2");
1112 Layer* layerM3 = graph.AddLayer<ActivationLayer>(activationDefaults, "layerM3");
1113 Layer* layerX2 = graph.AddLayer<OutputLayer>(0, "layerX2");
1114 Layer* layerX3 = graph.AddLayer<OutputLayer>(1, "layerX3");
1115
1116 // X1
1117 // |
1118 // M1
1119 // /|
1120 // / |
1121 // / |
1122 // M2 M3
1123 // | |
1124 // | |
1125 // X2 X3
1126
1127 layerX1->GetOutputSlot(0).Connect(layerM1->GetInputSlot(0));
1128 layerM1->GetOutputSlot(0).Connect(layerM2->GetInputSlot(0));
1129 layerM1->GetOutputSlot(1).Connect(layerM3->GetInputSlot(0));
1130 layerM2->GetOutputSlot(0).Connect(layerX2->GetInputSlot(0));
1131 layerM3->GetOutputSlot(0).Connect(layerX3->GetInputSlot(0));
1132
1133 SubgraphViewSelector::Subgraphs subgraphs =
1134 SubgraphViewSelector::SelectSubgraphs(
1135 graph,
1136 // select Activation and Splitter Layers M1, M2 and M3
1137 [](const Layer & l)
__anonc2781dcd0a02(const Layer & l) 1138 {
1139 bool toSelect = (l.GetType() == LayerType::Activation
1140 || l.GetType() == LayerType::Splitter);
1141 return toSelect;
1142 });
1143
1144 CHECK(subgraphs.size() == 1);
1145 if(subgraphs.size() == 1)
1146 {
1147 auto expected = CreateSubgraphViewFrom(CreateInputsFrom({layerM1}),
1148 CreateOutputsFrom({layerM2, layerM3}),
1149 {layerM1, layerM2, layerM3});
1150
1151 CompareSubgraphViews(subgraphs[0], expected);
1152 }
1153 }
1154
1155 TEST_CASE("MultiInputMultiOutput")
1156 {
1157 // This case represents the scenario with multiple inputs and multiple outputs
1158 //
1159 // X1 -> M1 -> M3 -> M4 -> X3
1160 // X2 -> M2 -> M3 -> M5 -> X4
1161 //
1162 // Where the input slots of {M1, M2} and the output slots of {M4, M5} are to be the subgraph
1163 // boundaries.
1164
1165 Graph graph;
1166
1167 ActivationDescriptor activationDefaults;
1168 OriginsDescriptor concatDescriptor(2);
1169
1170 auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1171 auto x2 = graph.AddLayer<InputLayer>(1, "x2");
1172
1173 auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1174 auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1175 auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1176
1177 auto m4 = graph.AddLayer<ActivationLayer>(activationDefaults, "m4");
1178 auto m5 = graph.AddLayer<ActivationLayer>(activationDefaults, "m5");
1179
1180 auto x3 = graph.AddLayer<OutputLayer>(0, "x3");
1181 auto x4 = graph.AddLayer<OutputLayer>(1, "x4");
1182
1183 x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1184 x2->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1185
1186 m1->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1187 m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1188
1189 m3->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1190 m3->GetOutputSlot(0).Connect(m5->GetInputSlot(0));
1191
1192 m4->GetOutputSlot(0).Connect(x3->GetInputSlot(0));
1193 m5->GetOutputSlot(0).Connect(x4->GetInputSlot(0));
1194
1195
1196 SubgraphViewSelector::Subgraphs subgraphs =
1197 SubgraphViewSelector::SelectSubgraphs(
1198 graph,
1199 // select Activation and Concat Layers M1, M2, M3, M4, M5
1200 [](const Layer & l)
__anonc2781dcd0b02(const Layer & l) 1201 {
1202 bool toSelect = (l.GetType() == LayerType::Activation
1203 || l.GetType() == LayerType::Concat);
1204 return toSelect;
1205 });
1206
1207
1208 CHECK(subgraphs.size() == 1);
1209 if (subgraphs.size() == 1)
1210 {
1211 auto expected = CreateSubgraphViewFrom(CreateInputsFrom({m1, m2}),
1212 CreateOutputsFrom({m4, m5}),
1213 {m1, m2, m3, m4, m5});
1214
1215 CompareSubgraphViews(subgraphs[0], expected);
1216 }
1217 }
1218
1219 TEST_CASE("ValidMerge")
1220 {
1221 // Checks that a node that has multiple choices for merge candidates (M3 in this case) correctly merges with the
1222 // one that it can (M0), and doesn't merge with the ones it can't (X2 and M2).
1223 //
1224 // X1
1225 // |
1226 // M1
1227 // / \'
1228 // X2 M2 M0
1229 // \ | /
1230 // M3
1231 //
1232 Graph graph;
1233
1234 ActivationDescriptor activationDefaults;
1235 OriginsDescriptor concatDescriptor(3);
1236
1237 auto x1 = graph.AddLayer<InputLayer>(0, "x1");
1238 auto x2 = graph.AddLayer<ActivationLayer>(activationDefaults, "x2");
1239 auto m0 = graph.AddLayer<InputLayer>(1, "m0");
1240 auto m1 = graph.AddLayer<ActivationLayer>(activationDefaults, "m1");
1241 auto m2 = graph.AddLayer<ActivationLayer>(activationDefaults, "m2");
1242 auto m3 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m3");
1243
1244 x1->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1245 m1->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1246 m1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1247 x2->GetOutputSlot(0).Connect(m3->GetInputSlot(0));
1248 m2->GetOutputSlot(0).Connect(m3->GetInputSlot(1));
1249 m0->GetOutputSlot(0).Connect(m3->GetInputSlot(2));
1250
1251 SubgraphViewSelector::Subgraphs subgraphs = SubgraphViewSelector::SelectSubgraphs(
1252 graph,
__anonc2781dcd0c02(const Layer& l) 1253 [](const Layer& l) {
1254 return std::string(l.GetName())[0] == 'm';
1255 });
1256
1257 // expected results to test against
1258 auto expectedSubgraph0 = CreateSubgraphViewFrom(
1259 std::vector<InputSlot*>{ &m3->GetInputSlot(0), & m3->GetInputSlot(1) },
1260 CreateOutputsFrom({ }),
1261 { m0, m3 });
1262
1263 auto expectedSubgraph1 =
1264 CreateSubgraphViewFrom(
1265 CreateInputsFrom({ m1 }),
1266 std::vector<OutputSlot*>{ &m1->GetOutputSlot(0), &m2->GetOutputSlot(0) },
1267 { m1, m2 });
1268
1269 CHECK(subgraphs.size() == 2);
1270 if (subgraphs.size() == 2)
1271 {
1272 // we need to have valid subgraph pointers here
1273 CHECK((subgraphs[0] != nullptr));
1274 CHECK((subgraphs[1] != nullptr));
1275 CompareSubgraphViews(subgraphs[0], expectedSubgraph0);
1276 CompareSubgraphViews(subgraphs[1], expectedSubgraph1);
1277 }
1278 }
1279
1280 TEST_CASE("PropagatedDependencies")
1281 {
1282 // Version of IslandInTheMiddle with longer chain
1283 // to make sure antecedents are propagated.
1284 /*
1285 M0
1286 / \
1287 M1 M4
1288 | |
1289 M2 X1 < the island in the middle !
1290 | |
1291 | M10
1292 | |
1293 | X2 < another island in the middle !
1294 | |
1295 M3 M5
1296 \ /
1297 M6
1298 */
1299 Graph graph;
1300
1301 OriginsDescriptor concatDescriptor(2);
1302 auto m6 = graph.AddLayer<ConcatLayer>(concatDescriptor, "m6");
1303 auto m3 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(0),
1304 ActivationDescriptor{},
1305 "m3");
1306 auto m2 = graph.InsertNewLayer<ActivationLayer>(m3->GetInputSlot(0),
1307 ActivationDescriptor{},
1308 "m2");
1309 auto m1 = graph.InsertNewLayer<ActivationLayer>(m2->GetInputSlot(0),
1310 ActivationDescriptor{},
1311 "m1");
1312 auto m0 = graph.InsertNewLayer<InputLayer>(m1->GetInputSlot(0), 0, "m0");
1313
1314 auto m5 = graph.InsertNewLayer<ActivationLayer>(m6->GetInputSlot(1),
1315 ActivationDescriptor{},
1316 "m5");
1317 auto x2 = graph.InsertNewLayer<ActivationLayer>(m5->GetInputSlot(0), ActivationDescriptor{}, "x2");
1318 auto m10 = graph.InsertNewLayer<ActivationLayer>(x2->GetInputSlot(0), ActivationDescriptor{}, "m10");
1319 auto x1 = graph.InsertNewLayer<ActivationLayer>(m10->GetInputSlot(0),
1320 ActivationDescriptor{},
1321 "x1");
1322 auto m4 = graph.InsertNewLayer<ActivationLayer>(x1->GetInputSlot(0),
1323 ActivationDescriptor{},
1324 "m4");
1325
1326 // Connect the other branch to the input layer
1327 m0->GetOutputSlot(0).Connect(m4->GetInputSlot(0));
1328
1329 // All selected 'M*' layers will be of Activation type
1330 SubgraphViewSelector::Subgraphs subgraphs =
1331 SubgraphViewSelector::SelectSubgraphs(
1332 graph,
1333 // select the middle layers only
1334 [](const Layer& l)
__anonc2781dcd0d02(const Layer& l) 1335 {
1336 bool toSelect = std::string(l.GetName())[0] == 'm';
1337 return toSelect;
1338 });
1339
1340 // expected results to test against
1341 auto largerSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({ m0 }),
1342 CreateOutputsFrom({ m3, m4 }),
1343 { m0, m1, m2, m3, m4 });
1344
1345 auto mediumSubgraph = CreateSubgraphViewFrom(std::vector<InputSlot*>{ &m5->GetInputSlot(0), &m6->GetInputSlot(0) },
1346 std::vector<OutputSlot*>{}, { m5, m6 });
1347
1348 auto smallerSubgraph =
1349 CreateSubgraphViewFrom(CreateInputsFrom({ m10 }), CreateOutputsFrom({ m10 }), { m10 });
1350
1351 CHECK(subgraphs.size() == 3);
1352 if (subgraphs.size() == 3)
1353 {
1354 // we need to have valid subgraph pointers here
1355 CHECK((subgraphs[0] != nullptr));
1356 CHECK((subgraphs[1] != nullptr));
1357 CHECK((subgraphs[2] != nullptr));
1358 CompareSubgraphViews(subgraphs[0], largerSubgraph);
1359 CompareSubgraphViews(subgraphs[1], mediumSubgraph);
1360 CompareSubgraphViews(subgraphs[2], smallerSubgraph);
1361 }
1362 }
1363
1364 TEST_CASE("Random")
1365 {
1366 // Creates random networks, splits them into subgraphs and checks the resulting subgraphs obey the required
1367 // dependency rules. We can easily generate very large networks which helps cover corner cases the other
1368 // small, manually crafted tests have missed. We can also use this to measure performance on large networks.
1369 constexpr bool debug = false; // Enable this to dump dot files and performance timings.
1370
1371 std::mt19937 randomGenerator;
1372
1373 // Helper function to get a random number in [0, maxExclusive)
__anonc2781dcd0e02(auto maxExclusive) 1374 auto GetRandom = [&randomGenerator](auto maxExclusive) {
1375 // Note we could use uniform_int_distribution here, but that gives inconsistent results across platforms
1376 // which makes it harder to reproduce results.
1377 // It appears that uniform_real_distribution is consistent across MSVC and gcc so we use that and round it.
1378 std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1379 return static_cast<decltype(maxExclusive)>(uniform(randomGenerator) * static_cast<float>(maxExclusive));
1380 };
1381 // Helper function to get a bool that has probability 'trueProb' of being true.
__anonc2781dcd0f02(float trueProb) 1382 auto GetRandomFlag = [&randomGenerator](float trueProb) {
1383 std::uniform_real_distribution<float> uniform(0.0f, 1.0f);
1384 return uniform(randomGenerator) < trueProb;
1385 };
1386
1387 constexpr uint32_t numTests = 100;
1388 for (uint32_t testIdx = 0; testIdx < numTests; ++testIdx)
1389 {
1390 randomGenerator.seed(testIdx); // Set a deterministic seed for reproducibility.
1391
1392 // Create random graph
1393 Graph graph;
1394 {
1395 // First add the layers, without any connections. The following random constants determine the number of
1396 // each layer to add, along with the chance that each layer will be 'supported' (i.e. selected for
1397 // inclusion in the resulting subgraphs).
1398 uint32_t numInputs = 1 + GetRandom(4u);
1399 uint32_t numConstants = 1 + GetRandom(4u);
1400 uint32_t numOutputs = 1 + GetRandom(4u);
1401 uint32_t numConcats = 0 + GetRandom(500u);
1402 uint32_t numSplits = 0 + GetRandom(500u);
1403 float supportedProb = 0.7f;
1404
1405 for (uint32_t i = 0; i < numInputs; ++i)
1406 {
1407 std::string name = "input" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1408 graph.AddLayer<InputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1409 }
1410 for (uint32_t i = 0; i < numConstants; ++i)
1411 {
1412 std::string name = "constant" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1413 graph.AddLayer<ConstantLayer>(name.c_str());
1414 }
1415 for (uint32_t i = 0; i < numOutputs; ++i)
1416 {
1417 std::string name = "output" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1418 graph.AddLayer<OutputLayer>(static_cast<LayerBindingId>(i), name.c_str());
1419 }
1420 for (uint32_t i = 0; i < numConcats; ++i)
1421 {
1422 std::string name = "concat" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1423 numInputs = 1 + GetRandom(3u);
1424 OriginsDescriptor concatDesc(numInputs);
1425 graph.AddLayer<ConcatLayer>(concatDesc, name.c_str());
1426 }
1427 for (uint32_t i = 0; i < numSplits; ++i)
1428 {
1429 std::string name = "split" + std::to_string(i) + (GetRandomFlag(supportedProb) ? "S" : "N");
1430 numOutputs = 1 + GetRandom(3u);
1431 ViewsDescriptor splitDesc(numOutputs);
1432 graph.AddLayer<SplitterLayer>(splitDesc, name.c_str());
1433 }
1434
1435 // Associate each layer with a "depth" parameter. This is used when creating connections to ensure
1436 // that we don't have any loops, by only connecting to layers with a lower "depth".
1437 // This can be thought of as distance from the "top" of the graph (assuming the graph flows top-to-bottom).
1438 // Unfortunately this approach ends up producing very "wide" graphs,
1439 // which probably isn't very representative of 'real' networks.
1440 uint32_t maxLayerDepth = 5 + GetRandom(2000u);
1441 std::map<Layer*, uint32_t> layerDepths;
1442 std::map<uint32_t, std::vector<Layer*>> layersAtDepth;
1443 for (Layer* layer : graph)
1444 {
1445 uint32_t depth;
1446 if (layer->GetType() == LayerType::Input || layer->GetType() == LayerType::Constant)
1447 {
1448 // There needs to be at least one input-like layer above everything else, otherwise would be
1449 // nothing for them to connect to!
1450 depth = 0;
1451 }
1452 else
1453 {
1454 // Other layers are randomly assigned to later depths.
1455 depth = 1 + GetRandom(maxLayerDepth);
1456 }
1457 layerDepths[layer] = depth;
1458 layersAtDepth[depth].push_back(layer);
1459 }
1460
1461 // Connect layers to each other. Every input slot of every layer must be connected, but it doesn't
1462 // matter if an output slot goes unused.
1463 for (Layer* layer : graph)
1464 {
1465 for (uint32_t inputSlotIdx = 0; inputSlotIdx < layer->GetNumInputSlots(); ++inputSlotIdx)
1466 {
1467 InputSlot& inputSlot = layer->GetInputSlot(inputSlotIdx);
1468 uint32_t maxLayerDepthToConnectTo = layerDepths[layer];
1469 // This prevents a connection causing a loop
1470 // Finding a layer to connect to may take multiple attempts, so keep trying until it works.
1471 while (inputSlot.GetConnectedOutputSlot() == nullptr)
1472 {
1473 uint32_t layerDepth = GetRandom(maxLayerDepthToConnectTo);
1474 const std::vector<Layer*>& layersToChooseFrom = layersAtDepth[layerDepth];
1475 if (layersToChooseFrom.size() == 0)
1476 {
1477 continue;
1478 }
1479 Layer* layerToConnectWith = layersToChooseFrom[GetRandom(layersToChooseFrom.size())];
1480 if (layerToConnectWith->GetNumOutputSlots() == 0)
1481 {
1482 continue;
1483 }
1484 uint32_t outputSlotIdx = GetRandom(layerToConnectWith->GetNumOutputSlots());
1485 layerToConnectWith->GetOutputSlot(outputSlotIdx).Connect(inputSlot);
1486 }
1487 }
1488 }
1489 }
1490
1491 if (debug)
1492 {
1493 std::ofstream f("INPUT_" + std::to_string(testIdx) + ".dot");
1494 graph.SerializeToDot(f);
1495 }
1496
1497 // Run the splitting algorithm, selecting all nodes ending in an 'S' (as randomly assigned above).
1498 auto startTime = std::chrono::high_resolution_clock::now();
1499
1500 SubgraphViewSelector::Subgraphs subgraphs =
1501 SubgraphViewSelector::SelectSubgraphs(graph,
__anonc2781dcd1002(const Layer& l) 1502 [](const Layer& l) { return std::string(l.GetName()).back() == 'S'; });
1503
1504 auto endTime = std::chrono::high_resolution_clock::now();
1505 auto duration = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
1506 if (debug)
1507 {
1508 std::cout << "Test " << testIdx << ": " << duration.count() << " microseconds" << std::endl;
1509 }
1510
1511 // Build a map of which subgraph is assigned to each layer.
1512 // This helps some of the following code.
1513 std::map<Layer*, SubgraphView*> layerToSubgraph;
1514 for (Layer* layer : graph)
1515 {
1516 size_t i = 0;
1517 for (auto& subgraph : subgraphs)
1518 {
1519 std::string name = std::to_string(i++);
1520 if (std::find(subgraph->cbeginIConnectable(), subgraph->cendIConnectable(), layer)
1521 != subgraph->cendIConnectable())
1522 {
1523 layerToSubgraph[layer] = subgraph.get();
1524 break;
1525 }
1526 }
1527 }
1528
1529 if (debug)
1530 {
1531 // Before dumping the dot file, set each Layer's BackendId property so that the dot file
1532 // shows the resulting subgraph assignments.
1533 for (Layer* layer : graph)
1534 {
1535 std::string name = "NotAssigned";
1536 auto subgraphIt = layerToSubgraph.find(layer);
1537 if (subgraphIt != layerToSubgraph.end())
1538 {
1539 auto subgraphIdx = std::distance(subgraphs.begin(),
1540 std::find_if(subgraphs.begin(), subgraphs.end(),
__anonc2781dcd1102(auto& s) 1541 [&](auto& s) { return s.get() == subgraphIt->second; }));
1542 name = std::to_string(subgraphIdx);
1543 }
1544 layer->SetBackendId(armnn::BackendId(name));
1545 }
1546
1547 std::ofstream f("GRAPH_" + std::to_string(testIdx) + ".dot");
1548 graph.SerializeToDot(f);
1549 }
1550
1551 // Check the dependencies between subgraphs to make sure that the algorithm has produced a valid result.
1552 // Starting from each of the input slots of each subgraph, recurse up the graph and ensure that we never
1553 // encounter a layer that belongs to the subgraph that we started from.
1554 for (auto& subgraph : subgraphs)
1555 {
1556 for (IInputSlot* inSlot : subgraph->GetIInputSlots())
1557 {
1558 std::queue<Layer*> toProcess;
1559 toProcess.push(&PolymorphicDowncast<InputSlot*>(inSlot)->GetConnectedOutputSlot()->GetOwningLayer());
1560 while (toProcess.size() > 0)
1561 {
1562 Layer* l = toProcess.front();
1563 toProcess.pop();
1564
1565 CHECK(layerToSubgraph[l] != subgraph.get());
1566
1567 for (const InputSlot& is : l->GetInputSlots())
1568 {
1569 toProcess.push(&is.GetConnectedOutputSlot()->GetOwningLayer());
1570 }
1571 }
1572 }
1573 }
1574 }
1575 }
1576
1577 }
1578
1579 TEST_SUITE("IntegrationTests")
1580 {
1581 TEST_CASE("SingleSubgraph")
1582 {
1583 // This test case represents the scenario when we have one subgraph
1584 // in which two layers have GpuAcc backend assigned
1585
1586 //Construct graph
1587 Graph graph;
1588
1589 Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");
1590
1591 Convolution2dDescriptor convDescriptor;
1592 Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
1593 convLayer1->SetBackendId(Compute::GpuAcc);
1594
1595 Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");
1596 convLayer2->SetBackendId(Compute::GpuAcc);
1597
1598 Layer* const weights1 = graph.AddLayer<ConstantLayer>("weights1");
1599 weights1->SetBackendId(Compute::GpuAcc);
1600 Layer* const weights2 = graph.AddLayer<ConstantLayer>("weights2");
1601 weights2->SetBackendId(Compute::GpuAcc);
1602
1603 Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");
1604
1605 inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
1606 weights1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
1607 convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
1608 weights2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
1609 convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
1610
1611 // GpuAcc sub graph selector
1612 SubgraphViewSelector::Subgraphs subgraphs =
1613 SubgraphViewSelector::SelectSubgraphs(
1614 graph,
1615 // select the GpuAcc layers only
__anonc2781dcd1202(const Layer & l)1616 [](const Layer & l){
1617 bool toSelect = (l.GetBackendId() == Compute::GpuAcc);
1618 return toSelect;
1619 });
1620
1621 CHECK(subgraphs.size() == 1);
1622 if(subgraphs.size() == 1)
1623 {
1624 CHECK((subgraphs[0] != nullptr));
1625
1626 if (subgraphs[0].get() != nullptr)
1627 {
1628 unsigned int numInputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
1629 unsigned int numOutputSlots = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());
1630
1631 CHECK((numInputSlots == 1));
1632 CHECK((numOutputSlots == 1));
1633
1634 // Save sub-graph connections for comparison after substitution
1635 IOutputSlot* subgraphInputConn1 = subgraphs[0]->GetIInputSlot(0)->GetConnection();
1636 IInputSlot* subgraphOutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
1637
1638 // Construct dummy pre-compiled layer
1639 PreCompiledDescriptor preCompiledDescriptor(numInputSlots, numOutputSlots);
1640 Layer* const preCompiledLayer = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
1641
1642 // Substitute sub-graph with pre-compiled layer
1643 graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer);
1644
1645 // Check that connections are correct after substitution
1646 CHECK_EQ(preCompiledLayer->GetInputSlot(0).GetConnection(), subgraphInputConn1);
1647
1648 CHECK_EQ(preCompiledLayer->GetOutputSlot(0).GetConnection(0), subgraphOutputConn1);
1649 }
1650 }
1651 }
1652
TEST_CASE("MultipleSubgraphs")
{
    // This test case represents the scenario when we have two subgraphs
    // in which two layers have CpuAcc backend assigned
    //
    // Topology: input -> splitter(CpuAcc) -> conv1/conv2 -> concat(CpuAcc) -> output
    // Selecting only the CpuAcc layers yields two disjoint subgraphs (the splitter
    // and the concat); each is then substituted with its own PreCompiledLayer and
    // the boundary connections are checked to survive the substitution.

    //Construct graph
    Graph graph;

    Layer* const inputLayer = graph.AddLayer<InputLayer>(0, "input");

    // Splitter with two output views, assigned to CpuAcc so the selector picks it
    ViewsDescriptor splitterDescriptor(2);
    Layer* const splitterLayer = graph.AddLayer<SplitterLayer>(splitterDescriptor, "splitter");
    splitterLayer->SetBackendId(Compute::CpuAcc);

    // Two parallel convolutions on the default backend (not selected)
    Convolution2dDescriptor convDescriptor;
    Layer* const convLayer1 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv1");
    Layer* const convLayer2 = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv2");

    // Constant weight inputs feeding slot 1 of each convolution
    Layer* const weights1 = graph.AddLayer<ConstantLayer>("weights1");
    Layer* const weights2 = graph.AddLayer<ConstantLayer>("weights2");

    // Concat with two inputs, assigned to CpuAcc so the selector picks it
    OriginsDescriptor concatDescriptor(2);
    Layer* const pConcatLayer = graph.AddLayer<ConcatLayer>(concatDescriptor, "concat");
    pConcatLayer->SetBackendId(Compute::CpuAcc);

    Layer* const outputLayer = graph.AddLayer<OutputLayer>(0, "output");

    // Wire up the graph
    inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
    splitterLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
    splitterLayer->GetOutputSlot(1).Connect(convLayer2->GetInputSlot(0));
    weights1->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(1));
    convLayer1->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(0));
    weights2->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(1));
    convLayer2->GetOutputSlot(0).Connect(pConcatLayer->GetInputSlot(1));
    pConcatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    // CpuAcc sub graph selector
    SubgraphViewSelector::Subgraphs subgraphs =
        SubgraphViewSelector::SelectSubgraphs(
            graph,
            // select the CpuAcc layers only
            [](const Layer & l){
                bool toSelect = (l.GetBackendId() == Compute::CpuAcc);
                return toSelect;
            });

    // The two selected layers are not adjacent, so two separate subgraphs are expected
    CHECK(subgraphs.size() == 2);
    if(subgraphs.size() == 2)
    {
        CHECK((subgraphs[0] != nullptr));
        CHECK((subgraphs[1] != nullptr));

        if (subgraphs[0].get() != nullptr && subgraphs[1].get() != nullptr)
        {
            unsigned int numInputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIInputSlots().size());
            unsigned int numOutputSlots1 = armnn::numeric_cast<unsigned int>(subgraphs[0]->GetIOutputSlots().size());

            unsigned int numInputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIInputSlots().size());
            unsigned int numOutputSlots2 = armnn::numeric_cast<unsigned int>(subgraphs[1]->GetIOutputSlots().size());

            // Save sub-graph connections for comparison after substitution
            IOutputSlot* subgraph1InputConn = subgraphs[0]->GetIInputSlot(0)->GetConnection();
            IInputSlot* subgraph1OutputConn1 = subgraphs[0]->GetIOutputSlot(0)->GetConnection(0);
            IInputSlot* subgraph1OutputConn2 = subgraphs[0]->GetIOutputSlot(1)->GetConnection(0);

            // Save sub-graph connections for comparison after substitution
            IOutputSlot* subgraph2InputConn1 = subgraphs[1]->GetIInputSlot(0)->GetConnection();
            IOutputSlot* subgraph2InputConn2 = subgraphs[1]->GetIInputSlot(1)->GetConnection();
            IInputSlot* subgraph2OutputConn = subgraphs[1]->GetIOutputSlot(0)->GetConnection(0);

            // Build a replacement PreCompiledLayer per subgraph with matching slot counts
            PreCompiledDescriptor preCompiledDescriptor1(numInputSlots1, numOutputSlots1);
            Layer* const preCompiledLayer1 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor1, "pre-compiled1");

            PreCompiledDescriptor preCompiledDescriptor2(numInputSlots2, numOutputSlots2);
            Layer* const preCompiledLayer2 = graph.AddLayer<PreCompiledLayer>(preCompiledDescriptor2, "pre-compiled2");

            // Substitute sub-graph with pre-compiled layer
            graph.SubstituteSubgraph(*subgraphs[0], preCompiledLayer1);
            graph.SubstituteSubgraph(*subgraphs[1], preCompiledLayer2);

            // Check that connections are correct after substitution
            CHECK_EQ(preCompiledLayer1->GetInputSlot(0).GetConnection(), subgraph1InputConn);
            CHECK_EQ(preCompiledLayer1->GetOutputSlot(0).GetConnection(0), subgraph1OutputConn1);
            CHECK_EQ(preCompiledLayer1->GetOutputSlot(1).GetConnection(0), subgraph1OutputConn2);

            CHECK_EQ(preCompiledLayer2->GetInputSlot(0).GetConnection(), subgraph2InputConn1);
            CHECK_EQ(preCompiledLayer2->GetInputSlot(1).GetConnection(), subgraph2InputConn2);
            CHECK_EQ(preCompiledLayer2->GetOutputSlot(0).GetConnection(0), subgraph2OutputConn);
        }
    }
}
1744
1745 TEST_CASE("SubgraphCycles")
1746 {
1747 // This case represent the scenario when a naive split could lead to a cyclic dependency between two subgraphs
1748 //
1749 // X0 -> M0 -> X1 -> M2 -> X2
1750 // X0 -> M0 -> M1 -> M2 -> X2
1751 //
1752 /*
1753 X0
1754 |
1755 |
1756 M0
1757 / |
1758 / |
1759 X1 M1
1760 \ /
1761 M2
1762 |
1763 X2
1764 */
1765 // The expected result for this is that M0,M1 will be part of one subgraph and M2 in another and the
1766 // input and output slots in the subgraphs will be set accordingly.
1767 //
1768 Graph graph;
1769
1770 OriginsDescriptor originsDescriptor(2);
1771 auto x0 = graph.AddLayer<InputLayer>(0, "x0");
1772 auto m0 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m0");
1773 auto x1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x1");
1774 auto m1 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "m1");
1775 auto m2 = graph.AddLayer<AdditionLayer>("m2");
1776 auto x2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "x2");
1777
1778 x0->GetOutputSlot(0).Connect(m0->GetInputSlot(0));
1779 m0->GetOutputSlot(0).Connect(x1->GetInputSlot(0));
1780 m0->GetOutputSlot(0).Connect(m1->GetInputSlot(0));
1781 x1->GetOutputSlot(0).Connect(m2->GetInputSlot(0));
1782 m1->GetOutputSlot(0).Connect(m2->GetInputSlot(1));
1783 m2->GetOutputSlot(0).Connect(x2->GetInputSlot(0));
1784
1785 // All selected 'M*' layers will be have 'm' in the name
1786 SubgraphViewSelector::Subgraphs subgraphs =
1787 SubgraphViewSelector::SelectSubgraphs(
1788 graph,
1789 // select the middle layers only
1790 [](const Layer & l)
__anonc2781dcd1402(const Layer & l) 1791 {
1792 bool toSelect = (l.GetNameStr().find('m') != std::string::npos);
1793 return toSelect;
1794 });
1795
1796 // expected results to test against
1797 auto inputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m0}),
1798 CreateOutputsFrom({m0, m1}),
1799 {m0, m1});
1800
1801 auto outputSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({m2}),
1802 CreateOutputsFrom({m2}),
1803 {m2});
1804
1805 CHECK(subgraphs.size() == 2);
1806 if (subgraphs.size() == 2)
1807 {
1808 // we need to have valid subgraph pointers here
1809 CHECK((subgraphs[0] != nullptr));
1810 CHECK((subgraphs[1] != nullptr));
1811 CompareSubgraphViews(subgraphs[0], inputSubgraph);
1812 CompareSubgraphViews(subgraphs[1], outputSubgraph);
1813 }
1814 }
1815
1816 TEST_CASE("SubgraphOrder")
1817 {
1818 Graph graph;
1819
1820 auto input = graph.AddLayer<InputLayer>(0, "Input");
1821 auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1822 auto output = graph.AddLayer<OutputLayer>(1, "Output");
1823
1824 input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1825 activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1826
1827 //Add in out of order
1828 auto view = CreateSubgraphViewFrom({},
1829 {},
1830 {output, input, activation});
1831
1832 // Check the layers are sorted topologically in the view
1833 int idx=0;
1834 LayerType expectedSorted[] = {LayerType::Input, LayerType::Activation, LayerType::Output};
1835 view->ForEachLayer([&idx, &expectedSorted](const Layer* l)
__anonc2781dcd1502(const Layer* l) 1836 {
1837 CHECK((expectedSorted[idx] == l->GetType()));
1838 idx++;
1839 }
1840 );
1841 }
1842
1843 TEST_CASE("SubgraphViewWorkingCopy")
1844 {
1845 Graph graph;
1846
1847 auto input = graph.AddLayer<InputLayer>(0, "Input");
1848 auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
1849 auto output = graph.AddLayer<OutputLayer>(1, "Output");
1850
1851 input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
1852 activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
1853
1854 //Add in out of order
1855 auto view = CreateSubgraphViewFrom({output, input, activation},
1856 {},
1857 {});
1858
1859 SubgraphView workingCopy = view->GetWorkingCopy();
1860
1861 // Check the layers are sorted topologically in the view
1862 int idx=0;
1863 LayerType expectedSorted[] = {LayerType::Input, LayerType::Activation, LayerType::Output};
1864 workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
__anonc2781dcd1602(const IConnectableLayer* l) 1865 {
1866 CHECK((expectedSorted[idx] == l->GetType()));
1867 idx++;
1868 }
1869 );
1870 }
1871
// Test helper: if 'layer' is a Multiplication with one Constant input of shape
// { 1, 1, 1, C } (a per-channel scalar matching the other input's channel dim),
// substitute the Multiplication together with its Constant inside 'subgraph'
// with a single DepthwiseConvolution2d layer.
// Returns true if a substitution was performed, false otherwise.
bool ReplaceConstantMultiplicationWithDepthwise(SubgraphView& subgraph,
                                                IConnectableLayer* layer)
{
    if (layer->GetType() == LayerType::Multiplication)
    {
        // Start by assuming input 0 is the data and input 1 the constant.
        IInputSlot* patternSubgraphInput = &layer->GetInputSlot(0);
        IInputSlot* patternSubgraphConstant = &layer->GetInputSlot(1);

        const IConnectableLayer* inputLayer = &patternSubgraphInput->GetConnection()->GetOwningIConnectableLayer();
        const IConnectableLayer* constantLayer = &layer->GetInputSlot(1).GetConnection()->GetOwningIConnectableLayer();

        // Figure out which of the two inputs is the constant
        if (constantLayer->GetType() != LayerType::Constant)
        {
            std::swap(patternSubgraphInput, patternSubgraphConstant);
            std::swap(inputLayer, constantLayer);
        }

        // Only proceed if one of the inputs really is a Constant layer.
        if (constantLayer->GetType() == LayerType::Constant)
        {
            const TensorInfo& inputInfo = inputLayer->GetOutputSlot(0).GetTensorInfo();
            const TensorInfo& constInfo = constantLayer->GetOutputSlot(0).GetTensorInfo();

            // Add a Depthwise only where the constant input is a scalar that takes the form { 1, 1, 1, C }.
            // The scalar is used as weights for the convolution.
            if (constInfo.GetShape() == TensorShape({ 1, 1, 1, inputInfo.GetShape()[3] }))
            {
                // The replacement layer is created in a throw-away INetwork.
                auto replacementGraph = INetwork::Create();

                DepthwiseConvolution2dDescriptor desc;
                desc.m_DataLayout = DataLayout::NHWC;

                // Depth multiplier M must be 1 for a {1,1,1,C} constant (asserted below).
                TensorInfo weightInfo = constInfo;
                const TensorInfo& outputInfo = layer->GetOutputSlot(0).GetTensorInfo();
                unsigned int M = outputInfo.GetShape()[3] / inputInfo.GetShape()[3];
                ARMNN_ASSERT_MSG(M == 1, "Constant multiplication only support 1x1x1xC, so M should always be 1 here");
                weightInfo.SetShape({ 1, 1, 1, constInfo.GetShape()[3] * M }); //1HW(I*M)

                // NOTE(review): 'weightInfo' above and 'weights' below are computed but
                // never handed to the depthwise layer — presumably leftovers from an
                // older AddDepthwiseConvolution2dLayer overload; confirm intent.
                const void* weightData = PolymorphicPointerDowncast<const ConstantLayer>(constantLayer)
                        ->m_LayerOutput->GetConstTensor<void>();
                TensorInfo weightsInfo = constInfo;
                ConstTensor weights(weightsInfo, weightData);

                const auto depthwiseLayer = replacementGraph->AddDepthwiseConvolution2dLayer(
                        desc, "Replacement for Constant-Multiplication");

                // The pattern to substitute is the Multiplication plus its Constant input;
                // its boundary is both mul input slots and the mul output slot.
                auto& outslot = layer->GetOutputSlot(0);
                SubgraphView::IOutputSlots outputs{ &outslot };
                SubgraphView::IConnectableLayers layers;
                layers.push_back(layer);
                layers.push_back(const_cast<IConnectableLayer*>(constantLayer));

                SubgraphView patternSubgraph(std::move(layers),
                                             {patternSubgraphInput, patternSubgraphConstant},
                                             {&layer->GetOutputSlot(0)});

                subgraph.SubstituteSubgraph(patternSubgraph, depthwiseLayer );

                return true;
            }
        }
    }
    return false;
}
1936
ReplaceTestMultiplication(SubgraphView & subgraph,IConnectableLayer * layer)1937 bool ReplaceTestMultiplication(SubgraphView& subgraph,
1938 IConnectableLayer* layer)
1939 {
1940 if (layer->GetType() == LayerType::Multiplication)
1941 {
1942
1943 switch (layer->GetType())
1944 {
1945 case LayerType::Multiplication:
1946 return ReplaceConstantMultiplicationWithDepthwise(subgraph, layer);
1947 break;
1948 default:
1949 throw Exception("Found unknown MultiplicationSupportedMode value");
1950 break;
1951 }
1952 }
1953 return false;
1954 }
1955
ReplaceUnsupportedLayers(SubgraphView & subgraph)1956 void ReplaceUnsupportedLayers(SubgraphView& subgraph)
1957 {
1958 using ReplacementFunc = bool (*)(SubgraphView&, IConnectableLayer*);
1959 const ReplacementFunc replacementFuncs[] = {
1960 &ReplaceTestMultiplication,
1961 };
1962
1963 subgraph.ForEachLayer([replacementFuncs, &subgraph](IConnectableLayer* layer)
1964 {
1965 auto madeChange = false;
1966 for (const ReplacementFunc f : replacementFuncs)
1967 {
1968 madeChange = f(subgraph, layer);
1969 if (madeChange)
1970 {
1971 goto nextIteration;
1972 }
1973 }
1974 nextIteration:;
1975 }
1976 );
1977 }
1978
TEST_CASE("SubgraphViewWorkingCopyReplacementFunc")
{
    // Builds input * constant -> output, takes a working copy of a view over
    // the whole graph, and checks ReplaceUnsupportedLayers() collapses the
    // Multiplication + Constant pair into a single DepthwiseConvolution2d.
    Graph graph;

    const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
    const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);

    // Constant data 0..15, one value per channel
    std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
    std::iota(constData.begin(), constData.end(), 0);
    ConstTensor constTensor(constInfo, constData);

    // Add the original pattern
    IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("const");

    constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
    IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
    IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");

    // Create connections between layers
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    constant->GetOutputSlot(0).SetTensorInfo(constInfo);
    mul->GetOutputSlot(0).SetTensorInfo(outputInfo);

    input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
    constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    //Add in out of order
    auto view = CreateSubgraphViewFrom({output, input, mul, constant},
                                       {},
                                       {});

    SubgraphView workingCopy = view->GetWorkingCopy();

    // Check the WorkingCopy is as expected before replacement
    CHECK(workingCopy.GetIConnectableLayers().size() == 4);
    int idx=0;
    LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSorted[idx] == l->GetType()));
                                             idx++;
                                         }
    );

    // Replace Multiplication and Constant with Depthwise
    ReplaceUnsupportedLayers(workingCopy);

    // Check the layers are as expected: mul + const collapsed into one layer
    CHECK(workingCopy.GetIConnectableLayers().size() == 3);
    idx=0;
    LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSortedReplaced[idx] == l->GetType()));
                                             idx++;
                                         }
    );
}
2040
2041 TEST_CASE("SubgraphViewWorkingCopySubstituteSubgraph")
2042 {
2043 Graph graph;
2044
2045 auto input = graph.AddLayer<InputLayer>(0, "Input");
2046 auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
2047 auto output = graph.AddLayer<OutputLayer>(1, "Output");
2048
2049 input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
2050 activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2051
2052 //Add in out of order
2053 auto view = CreateSubgraphViewFrom({output, input, activation},
2054 {},
2055 {});
2056
2057 // Check SubstituteSubgraphView throws when called on original SubgraphView
2058 SubgraphView temp(input);
2059 CHECK_THROWS_AS(view->SubstituteSubgraph(temp, input), NullPointerException);
2060
2061 // Check that GetWorkingCopy() being called on a working copy throws an exception
2062 auto workingCopy = view->GetWorkingCopy();
2063 CHECK_THROWS_AS(workingCopy.GetWorkingCopy(), Exception);
2064 }
2065
2066 TEST_CASE("SubgraphViewPartialWorkingCopySubstituteSubgraph")
2067 {
2068 Graph graph;
2069
2070 auto input = graph.AddLayer<InputLayer>(0, "Input");
2071 auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
2072 auto output = graph.AddLayer<OutputLayer>(1, "Output");
2073
2074 input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
2075 activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2076
2077 //Add in out of order
2078 auto view = CreateSubgraphViewFrom({activation},
2079 {&activation->GetInputSlot(0)},
2080 {&activation->GetOutputSlot(0)});
2081
2082 auto workingCopy = view->GetWorkingCopy();
2083
2084 // First (and only) layer in the subgraph is the Activation
2085 CHECK(std::string((*workingCopy.beginIConnectable())->GetName()) == "Activation");
2086
2087 // Substitute the "Activation" layer for an equivalent layer
2088 auto activation2 = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation2");
2089 SubgraphView pattern(*workingCopy.beginIConnectable());
2090 workingCopy.SubstituteSubgraph(pattern, activation2);
2091
2092 CHECK(std::string((*workingCopy.beginIConnectable())->GetName()) == "Activation2");
2093 }
2094
2095 // Workaround function used to get the original OutputSlot connected to the InputSlot of a SubgraphView
2096 // As working copy SubgraphViews do not have connections on boundary it finds the corresponding InputSlot
2097 // on the Original SubgraphView and then returns the OutputSlot connected to it.
2098 // Using this function to test against the simpler API: SubgraphView::GetOriginalInputSlots().
GetConnection(IInputSlot * inputSlot,const SubgraphView & workingCopy,const SubgraphView & original)2099 const IOutputSlot* GetConnection(IInputSlot* inputSlot,
2100 const SubgraphView& workingCopy,
2101 const SubgraphView& original)
2102 {
2103 const IOutputSlot* res = inputSlot->GetConnection();
2104 if (res)
2105 {
2106 return res;
2107 }
2108
2109 const SubgraphView::IInputSlots& workingCopyInputSlots = workingCopy.GetIInputSlots();
2110 const SubgraphView::IInputSlots& originalInputSlots = original.GetIInputSlots();
2111 for (SubgraphView::InputSlots::size_type i = 0; i < workingCopyInputSlots.size(); i++)
2112 {
2113 if (workingCopyInputSlots[i] == inputSlot)
2114 {
2115 return originalInputSlots[i]->GetConnection();
2116 }
2117 }
2118 return nullptr;
2119 }
2120
2121 // Workaround function used to get the original InputSlot connected to the OutputSlot of a SubgraphView
2122 // As working copy SubgraphViews do not have connections on boundary it finds the corresponding OutputSlot
2123 // on the Original SubgraphView and then returns the InputSlot connected to it using index parameter.
2124 // Using this function to test against the simpler API: SubgraphView::GetOriginalOutputSlots().
GetConnection(IOutputSlot * outputSlot,unsigned int index,const SubgraphView & workingCopy,const SubgraphView & original)2125 const IInputSlot* GetConnection(IOutputSlot* outputSlot,
2126 unsigned int index,
2127 const SubgraphView& workingCopy,
2128 const SubgraphView& original)
2129 {
2130 const IInputSlot* res;
2131 // Check within range
2132 if (index < outputSlot->GetNumConnections() && outputSlot->GetNumConnections() > 0)
2133 {
2134 res = outputSlot->GetConnection(index);
2135 return res;
2136 }
2137
2138 const SubgraphView::IOutputSlots& workingCopyOutputSlots = workingCopy.GetIOutputSlots();
2139 const SubgraphView::IOutputSlots& originalOutputSlots = original.GetIOutputSlots();
2140 for (SubgraphView::OutputSlots::size_type i = 0; i < workingCopyOutputSlots.size(); i++)
2141 {
2142 if (workingCopyOutputSlots[i] == outputSlot)
2143 {
2144 // Check within range
2145 if (index < originalOutputSlots[i]->GetNumConnections() && originalOutputSlots[i]->GetNumConnections() > 0)
2146 {
2147 return originalOutputSlots[i]->GetConnection(index);
2148 }
2149 }
2150 }
2151 return nullptr;
2152 }
2153
CheckOutOfScopeWorkingCopy()2154 SubgraphView CheckOutOfScopeWorkingCopy()
2155 {
2156 Graph graph;
2157
2158 auto input = graph.AddLayer<InputLayer>(0, "Input");
2159 auto activation = graph.AddLayer<ActivationLayer>(ActivationDescriptor{}, "Activation");
2160 auto output = graph.AddLayer<OutputLayer>(1, "Output");
2161
2162 input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
2163 activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));
2164
2165 //Add in out of order
2166 auto shared = CreateSubgraphViewFrom({activation},
2167 {&activation->GetInputSlot(0)},
2168 {&activation->GetOutputSlot(0)});
2169
2170 auto workingCopy = shared->GetWorkingCopy();
2171
2172 // Check InputSlots are same as original
2173 auto boundaryOutputSlot = GetConnection(workingCopy.GetIInputSlots()[0], workingCopy, *shared);
2174 CHECK(boundaryOutputSlot);
2175
2176 auto inputSlots = workingCopy.GetOriginalInputSlots();
2177 CHECK(inputSlots[0]->GetConnection() == boundaryOutputSlot);
2178
2179 // Check OutputSlots are same as original
2180 auto boundaryInputSlot = GetConnection(workingCopy.GetIOutputSlots()[0], 0U, workingCopy, *shared);
2181 CHECK(boundaryInputSlot);
2182
2183 auto outputSlots = workingCopy.GetOriginalOutputSlots();
2184 CHECK(outputSlots[0]->GetConnection(0) == boundaryInputSlot);
2185
2186 return workingCopy;
2187 }
2188
2189 TEST_CASE("SubgraphViewWorkingCopyOriginalSlots")
2190 {
2191 auto result = CheckOutOfScopeWorkingCopy();
2192 auto outputSlots = result.GetOriginalOutputSlots();
2193 }
2194
TEST_CASE("SubgraphViewWorkingCopyOptimizationViews")
{
    // Same pattern as SubgraphViewWorkingCopyReplacementFunc, then additionally
    // checks that a PreCompiledLayer substitution for the ORIGINAL view can be
    // registered and validated through OptimizationViews.
    Graph graph;

    const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
    const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);

    std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
    std::iota(constData.begin(), constData.end(), 0);
    ConstTensor constTensor(constInfo, constData);

    // Add the original pattern: input * constant -> output
    IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("const");

    constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
    IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
    IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");

    // Create connections between layers
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    constant->GetOutputSlot(0).SetTensorInfo(constInfo);
    mul->GetOutputSlot(0).SetTensorInfo(outputInfo);

    input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
    constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    //Add in out of order
    auto view = CreateSubgraphViewFrom({output, input, mul, constant},
                                       {},
                                       {});

    SubgraphView workingCopy = view->GetWorkingCopy();

    // Check the WorkingCopy is as expected before replacement
    int idx=0;
    LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSorted[idx] == l->GetType()));
                                             idx++;
                                         }
    );

    // Replace Multiplication and Constant with Depthwise
    ReplaceUnsupportedLayers(workingCopy);

    // Check the layers are as expected: mul + const collapsed into one layer
    idx=0;
    LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSortedReplaced[idx] == l->GetType()));
                                             idx++;
                                         }
    );


    // At this stage the NPU would take the working copy and create a CompiledBlobPtr with it.

    // We will just check that the precompiled layer can still be added to the optimizationViews via a SubgraphView.
    OptimizationViews optimizationViews;

    // An empty blob is sufficient for this test; only the wiring is checked.
    CompiledBlobPtr ptr;
    IConnectableLayer* preCompiledLayer = optimizationViews.GetINetwork()->AddPrecompiledLayer(
            PreCompiledDescriptor(view->GetNumInputSlots(), view->GetNumOutputSlots()),
            std::move(ptr),
            EmptyOptional(),
            "pre-compiled");


    optimizationViews.AddSubstitution({ *view, SubgraphView(preCompiledLayer) });
    CHECK(optimizationViews.Validate(*view));
}
2271
TEST_CASE("SubgraphViewWorkingCopyReplaceSlots")
{
    // Same replacement scenario as SubgraphViewWorkingCopyReplacementFunc, but
    // here the view is created WITH explicit boundary slots taken from the mul
    // layer, exercising slot replacement during substitution.
    Graph graph;
    const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
    const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);

    std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
    std::iota(constData.begin(), constData.end(), 0);
    ConstTensor constTensor(constInfo, constData);

    // Add the original pattern: input * constant -> output
    IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("const");

    constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
    IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
    IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");

    // Create connections between layers
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    constant->GetOutputSlot(0).SetTensorInfo(constInfo);
    mul->GetOutputSlot(0).SetTensorInfo(outputInfo);

    input->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
    constant->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    auto view = CreateSubgraphViewFrom({output, input, mul, constant},
                                       CreateIInputsFrom({mul}),
                                       CreateIOutputsFrom({mul}));

    SubgraphView workingCopy = view->GetWorkingCopy();

    // Check the WorkingCopy is as expected before replacement
    CHECK(workingCopy.GetIConnectableLayers().size() == 4);
    int idx=0;
    LayerType expectedSorted[] = {LayerType::Input, LayerType::Constant, LayerType::Multiplication, LayerType::Output};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSorted[idx] == l->GetType()));
                                             idx++;
                                         }
    );

    // Replace Multiplication and Constant with Depthwise
    ReplaceUnsupportedLayers(workingCopy);

    // Check the layers are as expected: mul + const collapsed into one layer
    idx=0;
    LayerType expectedSortedReplaced[] = {LayerType::Input, LayerType::DepthwiseConvolution2d, LayerType::Output};
    CHECK(workingCopy.GetIConnectableLayers().size() == 3);
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSortedReplaced](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSortedReplaced[idx] == l->GetType()));
                                             idx++;
                                         }
    );
}
2331
TEST_CASE("SubgraphViewWorkingCopyCloneInputAndOutputSlots")
{
    // Verifies that GetWorkingCopy() clones only the explicitly-listed boundary
    // slots (one input slot, two output slots) and keeps the layers in
    // topological order.
    Graph graph;

    const TensorInfo inputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);
    const TensorInfo constInfo({ 1, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0, true);
    const TensorInfo outputInfo({ 1, 8, 8, 16 }, DataType::QAsymmU8, 1.0f, 0);

    std::vector<uint8_t> constData(constInfo.GetNumElements(), 0);
    std::iota(constData.begin(), constData.end(), 0);
    ConstTensor constTensor(constInfo, constData);

    // Add the original pattern
    IConnectableLayer* input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("const");

    constant->m_LayerOutput = std::make_shared<ScopedTensorHandle>(constTensor);
    IConnectableLayer* mul = graph.AddLayer<MultiplicationLayer>("mul");
    armnn::ViewsDescriptor splitterDesc(2,4);
    IConnectableLayer* split = graph.AddLayer<SplitterLayer>(splitterDesc, "split");
    IConnectableLayer* abs = graph.AddLayer<ActivationLayer>(ActivationFunction::Abs, "abs");
    IConnectableLayer* relu = graph.AddLayer<ActivationLayer>(ActivationFunction::ReLu, "relu");
    armnn::OriginsDescriptor concatDesc(2, 4);
    // NOTE(review): the concat layer is named "constant" - presumably a
    // copy/paste slip in the label; harmless since names are not asserted here.
    IConnectableLayer* concat = graph.AddLayer<ConcatLayer>(concatDesc, "constant");
    IConnectableLayer* output = graph.AddLayer<OutputLayer>(0, "output");

    // Create connections between layers
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    constant->GetOutputSlot(0).SetTensorInfo(constInfo);
    mul->GetOutputSlot(0).SetTensorInfo(outputInfo);

    // Note the constant feeds mul slot 0 and the input feeds mul slot 1 here.
    input->GetOutputSlot(0).Connect(mul->GetInputSlot(1));
    constant->GetOutputSlot(0).Connect(mul->GetInputSlot(0));
    mul->GetOutputSlot(0).Connect(split->GetInputSlot(0));
    split->GetOutputSlot(0).Connect(abs->GetInputSlot(0));
    split->GetOutputSlot(1).Connect(relu->GetInputSlot(0));
    abs->GetOutputSlot(0).Connect(concat->GetInputSlot(0));
    relu->GetOutputSlot(0).Connect(concat->GetInputSlot(1));
    concat->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    //         constant   input        //
    //              \     /            //
    //               \   /             //
    //                mul              //
    //                 |               //
    //              splitter           //
    //              /     \            //
    //             /       \           //
    //           abs       relu        //
    //             \       /           //
    //              \     /            //
    //              concat             //
    //                 |               //
    //               output            //
    //                                 //
    // SubgraphView layers: constant mul splitter abs

    // Add just the InputSlot connected to the InputLayer to the SubgraphView's InputSlots
    SubgraphView::IInputSlots inputSlots;
    inputSlots.push_back(&mul->GetInputSlot(1));

    // Add just the OutputSlot connected to the splitter and abs to the SubgraphView's InputSlots
    SubgraphView::IOutputSlots outputSlots;
    outputSlots.push_back(&split->GetOutputSlot(1));
    outputSlots.push_back(&abs->GetOutputSlot(0));

    //Add in out of order
    auto view = CreateSubgraphViewFrom({constant, mul, split, abs},
                                       std::move(inputSlots),
                                       std::move(outputSlots));

    SubgraphView workingCopy = view->GetWorkingCopy();

    // Check that only 1 input slot is added.
    CHECK(workingCopy.GetIInputSlots().size() == 1);
    CHECK(workingCopy.GetIInputSlots()[0]->GetSlotIndex() == 1);

    // Both listed output slots must be cloned, in order: splitter then abs.
    CHECK(workingCopy.GetIOutputSlots().size() == 2);
    CHECK(workingCopy.GetIOutputSlots()[0]->GetOwningIConnectableLayer().GetType() == armnn::LayerType::Splitter);
    CHECK(workingCopy.GetIOutputSlots()[1]->GetOwningIConnectableLayer().GetType() == armnn::LayerType::Activation);

    // Check the WorkingCopy is as expected before replacement
    CHECK(workingCopy.GetIConnectableLayers().size() == 4);
    int idx=0;
    LayerType expectedSorted[] = {LayerType::Constant,
                                  LayerType::Multiplication,
                                  LayerType::Splitter,
                                  LayerType::Activation};
    workingCopy.ForEachIConnectableLayer([&idx, &expectedSorted](const IConnectableLayer* l)
                                         {
                                             CHECK((expectedSorted[idx] == l->GetType()));
                                             idx++;
                                         }
    );
}
2424
2425 TEST_CASE("MultipleOutputSlotsSubstituteGraph")
2426 {
2427 // Construct graph //
2428 // //
2429 // input //
2430 // | //
2431 // splitter //
2432 // / \ //
2433 // conv2d output //
2434 // | //
2435 // output //
2436 // //
2437 // SubgraphView layers: splitter conv2d
2438
2439 Graph graph;
2440 Layer* inputLayer = graph.AddLayer<InputLayer>(0, "input");
2441
2442 SplitterDescriptor splitDescriptor(2,4);
2443 Convolution2dDescriptor convDescriptor;
2444 Layer* splitLayer = graph.AddLayer<SplitterLayer>(splitDescriptor, "split");
2445 Layer* convLayer = graph.AddLayer<Convolution2dLayer>(convDescriptor, "conv");
2446
2447 Layer* outputLayer1 = graph.AddLayer<OutputLayer>(0, "output1");
2448 Layer* outputLayer2 = graph.AddLayer<OutputLayer>(1, "output2");
2449
2450 inputLayer->GetOutputSlot(0).Connect(splitLayer->GetInputSlot(0));
2451 splitLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2452 splitLayer->GetOutputSlot(1).Connect(outputLayer1->GetInputSlot(0));
2453 convLayer->GetOutputSlot(0).Connect(outputLayer2->GetInputSlot(0));
2454
2455 // main subgraph creation
2456 SubgraphView::IInputSlots inputSlots = {&splitLayer->GetInputSlot(0)};
2457 SubgraphView::IOutputSlots outputSlots = {&splitLayer->GetOutputSlot(1), &convLayer->GetOutputSlot(0)};
2458 auto view = CreateSubgraphViewFrom({splitLayer, convLayer},
2459 std::move(inputSlots),
2460 std::move(outputSlots));
2461
2462 // substitute subgraph creation
2463 OptimizationViews optimizationViews;
2464 CompiledBlobPtr ptr;
2465 IConnectableLayer* preCompiledLayer =
2466 optimizationViews.GetINetwork()->AddPrecompiledLayer(PreCompiledDescriptor(),
2467 std::move(ptr),
2468 EmptyOptional(),
2469 "pre-compiled");
2470
2471 auto substituteSubgraph = CreateSubgraphViewFrom({preCompiledLayer},
2472 {&preCompiledLayer->GetInputSlot(0)},
2473 {&preCompiledLayer->GetOutputSlot(0)});
2474
2475 // need to call GetWorkingCopy() in order for SubstituteSubgraph() to work later on
2476 SubgraphView viewCopy = view->GetWorkingCopy();
2477 IConnectableLayer* convCopyLayer = nullptr;
2478 IOutputSlot* splitOutputSlot = nullptr;
2479 for (auto layer : viewCopy.GetIConnectableLayers())
2480 {
        // GetWorkingCopy() has caused address pointer of splitter output slot to change.
2482 // Finding new address pointer...
2483 if (layer->GetType() == LayerType::Splitter)
2484 {
2485 splitOutputSlot = &layer->GetOutputSlot(1);
2486 }
2487
2488 // GetWorkingCopy() has caused address pointer of convolution layer to change.
2489 // Finding new address pointer...
2490 if (layer->GetType() == LayerType::Convolution2d)
2491 {
2492 convCopyLayer = layer;
2493 }
2494 }
2495
2496 // pattern subgraph creation
2497 SubgraphView::SubgraphViewPtr subgraph =
2498 CreateSubgraphViewFrom({convCopyLayer},
2499 {&convCopyLayer->GetInputSlot(0)},
2500 {&convCopyLayer->GetOutputSlot(0)});
2501
2502 // main substitute subgraph calculation
2503 viewCopy.SubstituteSubgraph(*subgraph, *substituteSubgraph);
2504
2505 // expecting convolution output slot to be changed with precompiled output slot
2506 // splitOutputSlot MUST remain as an expected output slot
2507 SubgraphView::IOutputSlots expectedOutputSlots = {splitOutputSlot,
2508 &preCompiledLayer->GetOutputSlot(0)};
2509
2510 CHECK(expectedOutputSlots == viewCopy.GetIOutputSlots());
2511 }
2512
2513 TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraph")
2514 {
2515 // Construct graph //
2516 // //
2517 // input //
2518 // | //
2519 // conv2d //
2520 // | //
2521 // const relu //
2522 // \ / \ //
2523 // add output //
2524 // | //
2525 // output //
2526 // //
2527 // SubgraphView layers: conv2d relu add const
2528
2529 Graph graph;
2530 Layer* inputLayer = graph.AddLayer<InputLayer>(0, "input");
2531
2532 Layer* convLayer = graph.AddLayer<Convolution2dLayer>(Convolution2dDescriptor(), "conv");
2533 Layer* reluLayer = graph.AddLayer<ActivationLayer>(ActivationDescriptor(), "activation");
2534 Layer* constLayer = graph.AddLayer<ConstantLayer>("const");
2535 Layer* addLayer = graph.AddLayer<AdditionLayer>("add");
2536
2537 Layer* outputLayer1 = graph.AddLayer<OutputLayer>(0, "output1");
2538 Layer* outputLayer2 = graph.AddLayer<OutputLayer>(1, "output2");
2539
2540 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2541 convLayer->GetOutputSlot(0).Connect(reluLayer->GetInputSlot(0));
2542 constLayer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
2543 reluLayer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
2544 reluLayer->GetOutputSlot(0).Connect(outputLayer1->GetInputSlot(0));
2545 addLayer->GetOutputSlot(0).Connect(outputLayer2->GetInputSlot(0));
2546
2547 // main subgraph creation
2548 SubgraphView::IInputSlots inputSlots = {&convLayer->GetInputSlot(0)};
2549 SubgraphView::IOutputSlots outputSlots = {&reluLayer->GetOutputSlot(0), &addLayer->GetOutputSlot(0)};
2550 auto view = CreateSubgraphViewFrom({convLayer, reluLayer, addLayer, constLayer},
2551 std::move(inputSlots),
2552 std::move(outputSlots));
2553
2554 // substitute subgraph creation
2555 OptimizationViews optimizationViews;
2556 IConnectableLayer* standInLayer = optimizationViews.GetINetwork()->AddStandInLayer(StandInDescriptor(1,1),
2557 "standin");
2558
2559 auto substituteSubgraph = CreateSubgraphViewFrom({standInLayer},
2560 {&standInLayer->GetInputSlot(0)},
2561 {&standInLayer->GetOutputSlot(0)});
2562
2563 // need to call GetWorkingCopy() in order for SubstituteSubgraph() to work later on
2564 SubgraphView viewCopy = view->GetWorkingCopy();
2565 IConnectableLayer* addCopyLayer = nullptr;
2566 IInputSlot* convInputSlot = nullptr;
2567 IOutputSlot* activationOutputSlot = nullptr;
2568 for (auto layer : viewCopy.GetIConnectableLayers())
2569 {
2570 // GetWorkingCopy() has caused address pointer of convolution2d input slot to change.
2571 // Finding new address pointer...
2572 if (layer->GetType() == LayerType::Convolution2d)
2573 {
2574 convInputSlot = &layer->GetInputSlot(0);
2575 }
2576
2577 // GetWorkingCopy() has caused address pointer of activation output slot to change.
2578 // Finding new address pointer...
2579 if (layer->GetType() == LayerType::Activation)
2580 {
2581 activationOutputSlot = &layer->GetOutputSlot(0);
2582 }
2583
2584 // GetWorkingCopy() has caused address pointer of convolution layer to change.
2585 // Finding new address pointer...
2586 if (layer->GetType() == LayerType::Addition)
2587 {
2588 addCopyLayer = layer;
2589 }
2590 }
2591
2592 // pattern subgraph creation
2593 IConnectableLayer* constCopyLayer = &addCopyLayer->GetInputSlot(0).GetConnection()->GetOwningIConnectableLayer();
2594 SubgraphView::SubgraphViewPtr subgraph = CreateSubgraphViewFrom({addCopyLayer, constCopyLayer},
2595 {&addCopyLayer->GetInputSlot(0)},
2596 {&addCopyLayer->GetOutputSlot(0)});
2597
2598 // main substitute subgraph calculation
2599 viewCopy.SubstituteSubgraph(*subgraph, *substituteSubgraph);
2600
2601 // expecting addition output slot to be changed with standin output slot
2602 // activationOutputSlot MUST remain as an expected output slot
2603 SubgraphView::IOutputSlots expectedOutputSlots = {activationOutputSlot,
2604 &standInLayer->GetOutputSlot(0)};
2605
2606 // convInputSlot MUST remain as an expected input slot
2607 SubgraphView::IInputSlots expectedInputSlots = {convInputSlot};
2608
2609 CHECK(expectedOutputSlots == viewCopy.GetIOutputSlots());
2610 CHECK(expectedInputSlots == viewCopy.GetIInputSlots());
2611 }
2612
2613
2614
2615 TEST_CASE("MultipleInputMultipleOutputSlots_SubstituteGraphNewSlots")
2616 {
2617 // Construct graph //
2618 // //
2619 // input //
2620 // | //
2621 // conv2d //
2622 // | //
2623 // const relu //
2624 // \ / \ //
2625 // add output //
2626 // | //
2627 // output //
2628 // //
2629 // SubgraphView layers: conv2d relu add const
2630
2631 Graph graph;
2632 Layer* inputLayer = graph.AddLayer<InputLayer>(0, "input");
2633
2634 Layer* convLayer = graph.AddLayer<Convolution2dLayer>(Convolution2dDescriptor(), "conv");
2635 Layer* reluLayer = graph.AddLayer<ActivationLayer>(ActivationDescriptor(), "activation");
2636 Layer* constLayer = graph.AddLayer<ConstantLayer>("const");
2637 Layer* addLayer = graph.AddLayer<AdditionLayer>("add");
2638
2639 Layer* outputLayer1 = graph.AddLayer<OutputLayer>(0, "output1");
2640 Layer* outputLayer2 = graph.AddLayer<OutputLayer>(1, "output2");
2641
2642 inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
2643 convLayer->GetOutputSlot(0).Connect(reluLayer->GetInputSlot(0));
2644 constLayer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(0));
2645 reluLayer->GetOutputSlot(0).Connect(addLayer->GetInputSlot(1));
2646 reluLayer->GetOutputSlot(0).Connect(outputLayer1->GetInputSlot(0));
2647 addLayer->GetOutputSlot(0).Connect(outputLayer2->GetInputSlot(0));
2648
2649 // main subgraph creation
2650 SubgraphView::IInputSlots inputSlots = {&convLayer->GetInputSlot(0)};
2651 SubgraphView::IOutputSlots outputSlots = {&reluLayer->GetOutputSlot(0), &addLayer->GetOutputSlot(0)};
2652 auto view = CreateSubgraphViewFrom({convLayer, reluLayer, addLayer, constLayer},
2653 std::move(inputSlots),
2654 std::move(outputSlots));
2655
2656 // need to call GetWorkingCopy() in order for SubstituteSubgraph() to work later on
2657 SubgraphView viewCopy = view->GetWorkingCopy();
2658 IConnectableLayer* addCopyLayer = nullptr;
2659 for (auto layer : viewCopy.GetIConnectableLayers())
2660 {
2661 // GetWorkingCopy() has caused address pointer of convolution layer to change.
2662 // Finding new address pointer...
2663 if (layer->GetType() == LayerType::Addition)
2664 {
2665 addCopyLayer = layer;
2666 }
2667 }
2668
2669 // substitute subgraph creation
2670 OptimizationViews optimizationViews;
2671 IConnectableLayer* standInLayer = optimizationViews.GetINetwork()->AddStandInLayer(StandInDescriptor(2,2),
2672 "standin");
2673 // Extra inputSlot (needed explicit use of vector to prevent ambiguity)
2674 auto substituteSubgraph1 = CreateSubgraphViewFrom({standInLayer},
2675 {&standInLayer->GetInputSlot(0),
2676 &standInLayer->GetInputSlot(1)},
2677 std::vector<IOutputSlot*>{&standInLayer->GetOutputSlot(0)});
2678 // Extra outputSlot
2679 auto substituteSubgraph2 = CreateSubgraphViewFrom({standInLayer},
2680 {&standInLayer->GetInputSlot(0)},
2681 std::vector<IOutputSlot*>{&standInLayer->GetOutputSlot(0),
2682 &standInLayer->GetOutputSlot(1)});
2683
2684 // pattern subgraph creation
2685 IConnectableLayer* constCopyLayer = &addCopyLayer->GetInputSlot(0).GetConnection()->GetOwningIConnectableLayer();
2686
2687 // Mismatched number of input slots (needed explicit use of vector to prevent ambiguity)
2688 SubgraphView::SubgraphViewPtr patternSubgraph1 =
2689 CreateSubgraphViewFrom({addCopyLayer, constCopyLayer},
2690 {&addCopyLayer->GetInputSlot(0)},
2691 std::vector<IOutputSlot*>{&addCopyLayer->GetOutputSlot(0)});
2692
2693 // Mismatched number of output slots
2694 SubgraphView::SubgraphViewPtr patternSubgraph2 = CreateSubgraphViewFrom({addCopyLayer, constCopyLayer},
2695 {&addCopyLayer->GetInputSlot(0)},
2696 {&addCopyLayer->GetOutputSlot(0)});
2697
2698
2699
2700
2701 // Ensure that a substitute subgraphView has same number of InputSlots as the pattern subgraphView
2702 CHECK_THROWS_AS(viewCopy.SubstituteSubgraph(*patternSubgraph1, *substituteSubgraph1),
2703 armnn::InvalidArgumentException);
2704
2705 // Ensure that a substitute subgraphView has same number of OutputSlots as the pattern subgraphView
2706 CHECK_THROWS_AS(viewCopy.SubstituteSubgraph(*patternSubgraph2, *substituteSubgraph2),
2707 armnn::InvalidArgumentException);
2708
2709 }
2710
2711 }
2712