1 //
2 // Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5
6 #include <GraphUtils.hpp>
7
8
9 #include <Network.hpp>
10
11 #include <doctest/doctest.h>
12
13 namespace
14 {
15
AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer & layer)16 bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer)
17 {
18 bool allConnected = true;
19 for (unsigned int i = 0; i < layer.GetNumInputSlots(); ++i)
20 {
21 const bool inputConnected = layer.GetInputSlot(i).GetConnection() != nullptr;
22 allConnected &= inputConnected;
23 }
24 return allConnected;
25 }
26
27 }
28
29 TEST_SUITE("Network")
30 {
31 TEST_CASE("LayerGuids")
32 {
33 armnn::NetworkImpl net;
34 LayerGuid inputId = net.AddInputLayer(0)->GetGuid();
35 LayerGuid addId = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Add)->GetGuid();
36 LayerGuid outputId = net.AddOutputLayer(0)->GetGuid();
37
38 CHECK(inputId != addId);
39 CHECK(addId != outputId);
40 CHECK(inputId != outputId);
41 }
42
43 TEST_CASE("NetworkBasic")
44 {
45 armnn::NetworkImpl net;
46 CHECK(net.PrintGraph() == armnn::Status::Success);
47 }
48
49 TEST_CASE("LayerNamesAreOptionalForINetwork")
50 {
51 armnn::INetworkPtr inet(armnn::INetwork::Create());
52 inet->AddInputLayer(0);
53 inet->AddElementwiseBinaryLayer(armnn::BinaryOperation::Add);
54 inet->AddActivationLayer(armnn::ActivationDescriptor());
55 inet->AddOutputLayer(0);
56 }
57
58 TEST_CASE("LayerNamesAreOptionalForNetwork")
59 {
60 armnn::NetworkImpl net;
61 net.AddInputLayer(0);
62 net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Add);
63 net.AddActivationLayer(armnn::ActivationDescriptor());
64 net.AddOutputLayer(0);
65 }
66
TEST_CASE("NetworkModification")
{
    // End-to-end graph construction test: builds a 13-layer network
    // (input -> conv -> fully-connected -> pooling -> activation ->
    //  normalization -> softmax -> batch-norm -> addition -> multiplication
    //  -> output, plus two constant weight layers) and then verifies layer
    // count, layer names, and slot-level connectivity.
    armnn::NetworkImpl net;

    armnn::IConnectableLayer* const inputLayer = net.AddInputLayer(0, "input layer");
    CHECK(inputLayer);

    // 10 float weights with shape {10,1,1,1}, flagged as constant data.
    unsigned int dims[] = { 10,1,1,1 };
    std::vector<float> convWeightsData(10);
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32, 0.0f, 0, true), convWeightsData);

    armnn::Convolution2dDescriptor convDesc2d;
    // Convolution weights are supplied via a dedicated constant layer that
    // feeds the convolution's input slot 1.
    armnn::IConnectableLayer* const weightsLayer = net.AddConstantLayer(weights, "conv const weights");
    armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d, "conv layer");
    CHECK(convLayer);
    CHECK(weightsLayer);

    inputLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(convLayer->GetInputSlot(1));

    armnn::FullyConnectedDescriptor fullyConnectedDesc;

    // Constant layer that now holds weights data for FullyConnected
    armnn::IConnectableLayer* const constantWeightsLayer = net.AddConstantLayer(weights, "fc const weights");
    armnn::IConnectableLayer* const fullyConnectedLayer = net.AddFullyConnectedLayer(fullyConnectedDesc,
                                                                                    "fully connected");
    CHECK(constantWeightsLayer);
    CHECK(fullyConnectedLayer);

    constantWeightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    convLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));

    armnn::Pooling2dDescriptor pooling2dDesc;
    armnn::IConnectableLayer* const poolingLayer = net.AddPooling2dLayer(pooling2dDesc, "pooling2d");
    CHECK(poolingLayer);

    fullyConnectedLayer->GetOutputSlot(0).Connect(poolingLayer->GetInputSlot(0));

    armnn::ActivationDescriptor activationDesc;
    armnn::IConnectableLayer* const activationLayer = net.AddActivationLayer(activationDesc, "activation");
    CHECK(activationLayer);

    poolingLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));

    armnn::NormalizationDescriptor normalizationDesc;
    armnn::IConnectableLayer* const normalizationLayer = net.AddNormalizationLayer(normalizationDesc, "normalization");
    CHECK(normalizationLayer);

    activationLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));

    armnn::SoftmaxDescriptor softmaxDesc;
    armnn::IConnectableLayer* const softmaxLayer = net.AddSoftmaxLayer(softmaxDesc, "softmax");
    CHECK(softmaxLayer);

    normalizationLayer->GetOutputSlot(0).Connect(softmaxLayer->GetInputSlot(0));

    armnn::BatchNormalizationDescriptor batchNormDesc;

    // Minimal single-element constant tensor reused for all four batch-norm
    // parameters (mean/variance/beta/gamma); this test only checks graph
    // topology, so the tensor contents are not validated here.
    armnn::TensorInfo tensorInfo({ 1 }, armnn::DataType::Float32, 0.0f, 0, true);
    std::vector<float> data(tensorInfo.GetNumBytes() / sizeof(float));
    armnn::ConstTensor invalidTensor(tensorInfo, data);

    armnn::IConnectableLayer* const batchNormalizationLayer = net.AddBatchNormalizationLayer(batchNormDesc,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             invalidTensor,
                                                                                             "batch norm");
    CHECK(batchNormalizationLayer);

    softmaxLayer->GetOutputSlot(0).Connect(batchNormalizationLayer->GetInputSlot(0));

    armnn::IConnectableLayer* const additionLayer = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Add,
                                                                                  "addition");
    CHECK(additionLayer);

    // One output fans out to both inputs of the addition (i.e. x + x).
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));
    batchNormalizationLayer->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const multiplicationLayer = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Mul,
                                                                                        "multiplication");
    CHECK(multiplicationLayer);

    // Likewise the addition result feeds both multiplication inputs (x * x).
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(0));
    additionLayer->GetOutputSlot(0).Connect(multiplicationLayer->GetInputSlot(1));

    armnn::IConnectableLayer* const outputLayer = net.AddOutputLayer(0, "output layer");
    CHECK(outputLayer);

    multiplicationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    //Tests that all layers are present in the graph.
    CHECK(net.GetGraph().GetNumLayers() == 13);

    //Tests that the vertices exist and have correct names.
    CHECK(GraphHasNamedLayer(net.GetGraph(), "input layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "conv layer"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "conv const weights"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "fc const weights"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "fully connected"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "pooling2d"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "activation"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "normalization"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "softmax"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "batch norm"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "addition"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "multiplication"));
    CHECK(GraphHasNamedLayer(net.GetGraph(), "output layer"));

    // Helper: asserts srcLayer's single output is connected exactly once,
    // to tgtLayer's input slot 0, and that the back-pointer agrees.
    auto checkOneOutputToOneInputConnection = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 1);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
    };
    // Helper: asserts srcLayer's single output fans out to BOTH input slots
    // of tgtLayer (used for the x+x / x*x patterns above).
    auto checkOneOutputToTwoInputsConnections = []
        (const armnn::IConnectableLayer* const srcLayer,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer->GetNumInputSlots() == expectedSrcNumInputs);
        CHECK(srcLayer->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer->GetOutputSlot(0).GetNumConnections() == 2);
        for (unsigned int i = 0; i < srcLayer->GetOutputSlot(0).GetNumConnections(); ++i)
        {
            CHECK(srcLayer->GetOutputSlot(0).GetConnection(i) == &tgtLayer->GetInputSlot(i));
            CHECK(&srcLayer->GetOutputSlot(0) == tgtLayer->GetInputSlot(i).GetConnection());
        }
    };
    // Helper: asserts two distinct source layers each feed one of tgtLayer's
    // two input slots (used for conv+weights and fc+weights wiring).
    auto checkOneOutputToTwoInputConnectionForTwoDifferentLayers = []
        (const armnn::IConnectableLayer* const srcLayer1,
         const armnn::IConnectableLayer* const srcLayer2,
         const armnn::IConnectableLayer* const tgtLayer,
         int expectedSrcNumInputs1 = 1,
         int expectedSrcNumInputs2 = 1,
         int expectedDstNumOutputs = 1)
    {
        CHECK(srcLayer1->GetNumInputSlots() == expectedSrcNumInputs1);
        CHECK(srcLayer1->GetNumOutputSlots() == 1);
        CHECK(srcLayer2->GetNumInputSlots() == expectedSrcNumInputs2);
        CHECK(srcLayer2->GetNumOutputSlots() == 1);
        CHECK(tgtLayer->GetNumInputSlots() == 2);
        CHECK(tgtLayer->GetNumOutputSlots() == expectedDstNumOutputs);

        CHECK(srcLayer1->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer2->GetOutputSlot(0).GetNumConnections() == 1);
        CHECK(srcLayer1->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(0));
        CHECK(srcLayer2->GetOutputSlot(0).GetConnection(0) == &tgtLayer->GetInputSlot(1));
        CHECK(&srcLayer1->GetOutputSlot(0) == tgtLayer->GetInputSlot(0).GetConnection());
        CHECK(&srcLayer2->GetOutputSlot(0) == tgtLayer->GetInputSlot(1).GetConnection());
    };

    // Every non-source layer in the chain must be fully connected.
    CHECK(AreAllLayerInputSlotsConnected(*convLayer));
    CHECK(AreAllLayerInputSlotsConnected(*fullyConnectedLayer));
    CHECK(AreAllLayerInputSlotsConnected(*poolingLayer));
    CHECK(AreAllLayerInputSlotsConnected(*activationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*normalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*softmaxLayer));
    CHECK(AreAllLayerInputSlotsConnected(*batchNormalizationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*additionLayer));
    CHECK(AreAllLayerInputSlotsConnected(*multiplicationLayer));
    CHECK(AreAllLayerInputSlotsConnected(*outputLayer));

    // Checks connectivity.
    checkOneOutputToTwoInputConnectionForTwoDifferentLayers(inputLayer, weightsLayer, convLayer, 0, 0);
    checkOneOutputToTwoInputConnectionForTwoDifferentLayers(convLayer, constantWeightsLayer, fullyConnectedLayer, 2, 0);
    checkOneOutputToOneInputConnection(fullyConnectedLayer, poolingLayer, 2, 1);
    checkOneOutputToOneInputConnection(poolingLayer, activationLayer);
    checkOneOutputToOneInputConnection(activationLayer, normalizationLayer);
    checkOneOutputToOneInputConnection(normalizationLayer, softmaxLayer);
    checkOneOutputToOneInputConnection(softmaxLayer, batchNormalizationLayer);
    checkOneOutputToTwoInputsConnections(batchNormalizationLayer, additionLayer, 1);
    checkOneOutputToTwoInputsConnections(additionLayer, multiplicationLayer, 2);
    checkOneOutputToOneInputConnection(multiplicationLayer, outputLayer, 2, 0);
}
255
256 TEST_CASE("NetworkModification_SplitterConcat")
257 {
258 armnn::NetworkImpl net;
259
260 // Adds an input layer and an input tensor descriptor.
261 armnn::IConnectableLayer* inputLayer = net.AddInputLayer(0, "input layer");
262 CHECK(inputLayer);
263
264 // Adds a splitter layer.
265 armnn::ViewsDescriptor splitterDesc(2,4);
266
267 armnn::IConnectableLayer* splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
268 CHECK(splitterLayer);
269
270 inputLayer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
271
272 // Adds a softmax layer 1.
273 armnn::SoftmaxDescriptor softmaxDescriptor;
274 armnn::IConnectableLayer* softmaxLayer1 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
275 CHECK(softmaxLayer1);
276
277 splitterLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));
278
279 // Adds a softmax layer 2.
280 armnn::IConnectableLayer* softmaxLayer2 = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
281 CHECK(softmaxLayer2);
282
283 splitterLayer->GetOutputSlot(1).Connect(softmaxLayer2->GetInputSlot(0));
284
285 // Adds a concat layer.
286 armnn::OriginsDescriptor concatDesc(2, 4);
287
288 armnn::IConnectableLayer* concatLayer = net.AddConcatLayer(concatDesc, "concat layer");
289 CHECK(concatLayer);
290
291 softmaxLayer1->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(0));
292 softmaxLayer2->GetOutputSlot(0).Connect(concatLayer->GetInputSlot(1));
293
294 // Adds an output layer.
295 armnn::IConnectableLayer* outputLayer = net.AddOutputLayer(0, "output layer");
296 CHECK(outputLayer);
297
298 concatLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
299
300 CHECK(splitterLayer->GetNumOutputSlots() == 2);
301 CHECK(splitterLayer->GetOutputSlot(0).GetConnection(0) == &softmaxLayer1->GetInputSlot(0));
302 CHECK(&splitterLayer->GetOutputSlot(0) == softmaxLayer1->GetInputSlot(0).GetConnection());
303 CHECK(splitterLayer->GetOutputSlot(1).GetConnection(0) == &softmaxLayer2->GetInputSlot(0));
304 CHECK(&splitterLayer->GetOutputSlot(1) == softmaxLayer2->GetInputSlot(0).GetConnection());
305
306 CHECK(concatLayer->GetNumInputSlots() == 2);
307 CHECK(softmaxLayer1->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(0));
308 CHECK(&softmaxLayer1->GetOutputSlot(0) == concatLayer->GetInputSlot(0).GetConnection());
309 CHECK(softmaxLayer2->GetOutputSlot(0).GetConnection(0) == &concatLayer->GetInputSlot(1));
310 CHECK(&softmaxLayer2->GetOutputSlot(0) == concatLayer->GetInputSlot(1).GetConnection());
311 }
312
313 TEST_CASE("NetworkModification_SplitterAddition")
314 {
315 armnn::NetworkImpl net;
316
317 // Adds an input layer and an input tensor descriptor.
318 armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
319 CHECK(layer);
320
321 // Adds a splitter layer.
322 armnn::ViewsDescriptor splitterDesc(2,4);
323
324 armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
325 CHECK(splitterLayer);
326
327 layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
328
329 // Adds a softmax layer 1.
330 armnn::SoftmaxDescriptor softmaxDescriptor;
331 armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
332 CHECK(softmax1Layer);
333
334 splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
335
336 // Adds a softmax layer 2.
337 armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
338 CHECK(softmax2Layer);
339
340 splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
341
342 // Adds addition layer.
343 layer = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Add, "add layer");
344 CHECK(layer);
345
346 softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
347 softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
348
349 // Adds an output layer.
350 armnn::IConnectableLayer* prevLayer = layer;
351 layer = net.AddOutputLayer(0, "output layer");
352
353 prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
354
355 CHECK(layer);
356 }
357
358 TEST_CASE("NetworkModification_SplitterMultiplication")
359 {
360 armnn::NetworkImpl net;
361
362 // Adds an input layer and an input tensor descriptor.
363 armnn::IConnectableLayer* layer = net.AddInputLayer(0, "input layer");
364 CHECK(layer);
365
366 // Adds a splitter layer.
367 armnn::ViewsDescriptor splitterDesc(2,4);
368 armnn::IConnectableLayer* const splitterLayer = net.AddSplitterLayer(splitterDesc, "splitter layer");
369 CHECK(splitterLayer);
370
371 layer->GetOutputSlot(0).Connect(splitterLayer->GetInputSlot(0));
372
373 // Adds a softmax layer 1.
374 armnn::SoftmaxDescriptor softmaxDescriptor;
375 armnn::IConnectableLayer* const softmax1Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_1");
376 CHECK(softmax1Layer);
377
378 splitterLayer->GetOutputSlot(0).Connect(softmax1Layer->GetInputSlot(0));
379
380 // Adds a softmax layer 2.
381 armnn::IConnectableLayer* const softmax2Layer = net.AddSoftmaxLayer(softmaxDescriptor, "softmax_2");
382 CHECK(softmax2Layer);
383
384 splitterLayer->GetOutputSlot(1).Connect(softmax2Layer->GetInputSlot(0));
385
386 // Adds multiplication layer.
387 layer = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Mul, "multiplication layer");
388 CHECK(layer);
389
390 softmax1Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
391 softmax2Layer->GetOutputSlot(0).Connect(layer->GetInputSlot(1));
392
393 // Adds an output layer.
394 armnn::IConnectableLayer* prevLayer = layer;
395 layer = net.AddOutputLayer(0, "output layer");
396 CHECK(layer);
397
398 prevLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
399 }
400
TEST_CASE("Network_AddQuantize")
{
    // Visitor that records whether a Quantize layer was encountered and, when
    // it is, validates its name, slot counts and input/output data types.
    struct Test : public armnn::IStrategy
    {
        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id = 0) override
        {
            armnn::IgnoreUnused(descriptor, constants, id);
            switch (layer->GetType())
            {
                case armnn::LayerType::Input: break;
                case armnn::LayerType::Output: break;
                case armnn::LayerType::Quantize:
                {
                    m_Visited = true;

                    CHECK(layer);

                    // Both the layer itself and the visitor's name argument
                    // must carry the name given at AddQuantizeLayer time.
                    std::string expectedName = std::string("quantize");
                    CHECK(std::string(layer->GetName()) == expectedName);
                    CHECK(std::string(name) == expectedName);

                    CHECK(layer->GetNumInputSlots() == 1);
                    CHECK(layer->GetNumOutputSlots() == 1);

                    // Input tensor info is set on the producing slot below (Float32)...
                    const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
                    CHECK((infoIn.GetDataType() == armnn::DataType::Float32));

                    // ...and the quantize output was configured as QAsymmU8.
                    const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
                    CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8));
                    break;
                }
                default:
                {
                    // nothing
                }
            }
        }

        // Set to true once the Quantize layer has been visited.
        bool m_Visited = false;
    };


    // Build input -> quantize -> output.
    auto graph = armnn::INetwork::Create();

    auto input = graph->AddInputLayer(0, "input");
    auto quantize = graph->AddQuantizeLayer("quantize");
    auto output = graph->AddOutputLayer(1, "output");

    input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0));
    quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Float32 in, QAsymmU8 out — these are the types the visitor asserts.
    armnn::TensorInfo infoIn({3,1}, armnn::DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(infoIn);

    armnn::TensorInfo infoOut({3,1}, armnn::DataType::QAsymmU8);
    quantize->GetOutputSlot(0).SetTensorInfo(infoOut);

    Test testQuantize;
    graph->ExecuteStrategy(testQuantize);

    // The strategy must have actually seen the Quantize layer.
    CHECK(testQuantize.m_Visited == true);

}
468
TEST_CASE("Network_AddMerge")
{
    // Visitor that records whether a Merge layer was encountered and, when it
    // is, validates its name, slot counts and Float32 input/output types.
    struct Test : public armnn::IStrategy
    {
        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id = 0) override
        {
            armnn::IgnoreUnused(descriptor, constants, id);
            switch (layer->GetType())
            {
                case armnn::LayerType::Input: break;
                case armnn::LayerType::Output: break;
                case armnn::LayerType::Merge:
                {
                    m_Visited = true;

                    CHECK(layer);

                    // Both the layer and the visitor's name argument must
                    // carry the name given at AddMergeLayer time.
                    std::string expectedName = std::string("merge");
                    CHECK(std::string(layer->GetName()) == expectedName);
                    CHECK(std::string(name) == expectedName);

                    // Merge takes two inputs and produces one output.
                    CHECK(layer->GetNumInputSlots() == 2);
                    CHECK(layer->GetNumOutputSlots() == 1);

                    // All tensor infos were configured as Float32 below.
                    const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo();
                    CHECK((infoIn0.GetDataType() == armnn::DataType::Float32));

                    const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo();
                    CHECK((infoIn1.GetDataType() == armnn::DataType::Float32));

                    const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo();
                    CHECK((infoOut.GetDataType() == armnn::DataType::Float32));
                    break;
                }
                default:
                {
                    // nothing
                }
            }
        }

        // Set to true once the Merge layer has been visited.
        bool m_Visited = false;
    };

    // Build (input0, input1) -> merge -> output.
    armnn::INetworkPtr network = armnn::INetwork::Create();

    armnn::IConnectableLayer* input0 = network->AddInputLayer(0);
    armnn::IConnectableLayer* input1 = network->AddInputLayer(1);
    armnn::IConnectableLayer* merge = network->AddMergeLayer("merge");
    armnn::IConnectableLayer* output = network->AddOutputLayer(0);

    input0->GetOutputSlot(0).Connect(merge->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(merge->GetInputSlot(1));
    merge->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Same Float32 tensor info on every slot the visitor inspects.
    const armnn::TensorInfo info({3,1}, armnn::DataType::Float32);
    input0->GetOutputSlot(0).SetTensorInfo(info);
    input1->GetOutputSlot(0).SetTensorInfo(info);
    merge->GetOutputSlot(0).SetTensorInfo(info);

    Test testMerge;
    network->ExecuteStrategy(testMerge);

    // The strategy must have actually seen the Merge layer.
    CHECK(testMerge.m_Visited == true);
}
538
539 TEST_CASE("StandInLayerNetworkTest")
540 {
541 // Create a simple network with a StandIn some place in it.
542 armnn::NetworkImpl net;
543 auto input = net.AddInputLayer(0);
544
545 // Add some valid layer.
546 auto floor = net.AddFloorLayer("Floor");
547
548 // Add a standin layer
549 armnn::StandInDescriptor standInDescriptor;
550 standInDescriptor.m_NumInputs = 1;
551 standInDescriptor.m_NumOutputs = 1;
552 auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
553
554 // Finally the output.
555 auto output = net.AddOutputLayer(0);
556
557 // Connect up the layers
558 input->GetOutputSlot(0).Connect(floor->GetInputSlot(0));
559
560 floor->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
561
562 standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0));
563
564 // Check that the layer is there.
565 CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
566 // Check that it is connected as expected.
567 CHECK(input->GetOutputSlot(0).GetConnection(0) == &floor->GetInputSlot(0));
568 CHECK(floor->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
569 CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output->GetInputSlot(0));
570 }
571
572 TEST_CASE("StandInLayerSingleInputMultipleOutputsNetworkTest")
573 {
574 // Another test with one input and two outputs on the StandIn layer.
575 armnn::NetworkImpl net;
576
577 // Create the input.
578 auto input = net.AddInputLayer(0);
579
580 // Add a standin layer
581 armnn::StandInDescriptor standInDescriptor;
582 standInDescriptor.m_NumInputs = 1;
583 standInDescriptor.m_NumOutputs = 2;
584 auto standIn = net.AddStandInLayer(standInDescriptor, "StandIn");
585
586 // Add two outputs.
587 auto output0 = net.AddOutputLayer(0);
588 auto output1 = net.AddOutputLayer(1);
589
590 // Connect up the layers
591 input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
592
593 // Connect the two outputs of the Standin to the two outputs.
594 standIn->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
595 standIn->GetOutputSlot(1).Connect(output1->GetInputSlot(0));
596
597 // Check that the layer is there.
598 CHECK(GraphHasNamedLayer(net.GetGraph(), "StandIn"));
599 // Check that it is connected as expected.
600 CHECK(input->GetOutputSlot(0).GetConnection(0) == &standIn->GetInputSlot(0));
601 CHECK(standIn->GetOutputSlot(0).GetConnection(0) == &output0->GetInputSlot(0));
602 CHECK(standIn->GetOutputSlot(1).GetConnection(0) == &output1->GetInputSlot(0));
603 }
604
605 TEST_CASE("ObtainConv2DDescriptorFromIConnectableLayer")
606 {
607 armnn::NetworkImpl net;
608
609 armnn::Convolution2dDescriptor convDesc2d;
610 convDesc2d.m_PadLeft = 2;
611 convDesc2d.m_PadRight = 3;
612 convDesc2d.m_PadTop = 4;
613 convDesc2d.m_PadBottom = 5;
614 convDesc2d.m_StrideX = 2;
615 convDesc2d.m_StrideY = 1;
616 convDesc2d.m_DilationX = 3;
617 convDesc2d.m_DilationY = 3;
618 convDesc2d.m_BiasEnabled = false;
619 convDesc2d.m_DataLayout = armnn::DataLayout::NCHW;
620 armnn::IConnectableLayer* const convLayer = net.AddConvolution2dLayer(convDesc2d, "conv layer");
621 CHECK(convLayer);
622
623 const armnn::BaseDescriptor& descriptor = convLayer->GetParameters();
624 CHECK(descriptor.IsNull() == false);
625 const armnn::Convolution2dDescriptor& originalDescriptor =
626 static_cast<const armnn::Convolution2dDescriptor&>(descriptor);
627 CHECK(originalDescriptor.m_PadLeft == 2);
628 CHECK(originalDescriptor.m_PadRight == 3);
629 CHECK(originalDescriptor.m_PadTop == 4);
630 CHECK(originalDescriptor.m_PadBottom == 5);
631 CHECK(originalDescriptor.m_StrideX == 2);
632 CHECK(originalDescriptor.m_StrideY == 1);
633 CHECK(originalDescriptor.m_DilationX == 3);
634 CHECK(originalDescriptor.m_DilationY == 3);
635 CHECK(originalDescriptor.m_BiasEnabled == false);
636 CHECK(originalDescriptor.m_DataLayout == armnn::DataLayout::NCHW);
637 }
638
639 TEST_CASE("CheckNotNullDescriptor")
640 {
641 armnn::NetworkImpl net;
642 armnn::IConnectableLayer* const addLayer = net.AddElementwiseBinaryLayer(armnn::BinaryOperation::Add);
643
644 CHECK(addLayer);
645
646 const armnn::BaseDescriptor& descriptor = addLayer->GetParameters();
647 // additional layer has no descriptor so a NullDescriptor will be returned
648 CHECK(descriptor.IsNull() == false);
649 }
650
651 TEST_CASE("CheckNullDescriptor")
652 {
653 armnn::NetworkImpl net;
654 armnn::IConnectableLayer* const addLayer = net.AddPreluLayer();
655
656 CHECK(addLayer);
657
658 const armnn::BaseDescriptor& descriptor = addLayer->GetParameters();
659 // Prelu has no descriptor so a NullDescriptor will be returned
660 CHECK(descriptor.IsNull() == true);
661 }
662
663 }
664