//
// Copyright © 2020-2021,2023 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <TestUtils.hpp>

#include <Optimizer.hpp>

#include <doctest/doctest.h>

TEST_SUITE("Optimizer")
{
using namespace armnn::optimizations;

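// Verifies that MoveTransposeUp moves a Transpose that sits just before the output
// up past layers that do not care about data format, inserting new Transpose layers
// on the extra inputs of layers that take more than one input.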
TEST_CASE("MoveTransposeUpTest")
{
    const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
    const armnn::TensorInfo transposed({ 1, 3, 5, 2 }, armnn::DataType::Float32);

    armnn::Graph graph;

    armnn::LayerBindingId inputId = 0;

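    // The graph is built backwards from the output: each InsertNewLayer call places
    // a new layer in front of the current head's input slot.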
    armnn::Layer* head = graph.AddLayer<armnn::OutputLayer>(0, "output");

    std::string transposeLayerName = "original_transpose";

    // Insert transpose
    head = graph.InsertNewLayer<armnn::TransposeLayer>(head->GetInputSlot(0),
                                                       armnn::TransposeDescriptor({ 0, 3, 1, 2 }),
                                                       transposeLayerName.c_str());

    head->GetOutputHandler().SetTensorInfo(transposed);

    // Inserts layers that don't care about data format.
    head = graph.InsertNewLayer<armnn::ActivationLayer>(head->GetInputSlot(0), armnn::ActivationDescriptor{}, "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::ElementwiseBinaryLayer>(head->GetInputSlot(0), armnn::BinaryOperation::Add, "");
    head->GetOutputHandler().SetTensorInfo(info);

    // Inserts input for 2nd input of Addition.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::FakeQuantizationLayer>(head->GetInputSlot(0),
                                                              armnn::FakeQuantizationDescriptor{}, "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::FloorLayer>(head->GetInputSlot(0), "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::MemCopyLayer>(head->GetInputSlot(0), "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::ElementwiseBinaryLayer>(head->GetInputSlot(0), armnn::BinaryOperation::Mul, "");
    head->GetOutputHandler().SetTensorInfo(info);

    // Inserts input for 2nd input of Multiplication.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

    // Inserts input.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(0), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

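    // Confirm the layer order before the optimization: the three inputs feed the chain,
    // with the single transpose sitting just before the output.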
    CHECK(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
                             &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
                             &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::MemCopyLayer>,
                             &IsLayerOfType<armnn::FloorLayer>, &IsLayerOfType<armnn::FakeQuantizationLayer>,
                             &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::ActivationLayer>,
                             &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::OutputLayer>));

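    // Run the MoveTransposeUp optimization over the whole graph.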
    armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(MoveTransposeUp()));

    // The transpose is moved to the top of the graph; new transposes are inserted
    // for the remaining inputs of the layers that take more than one input.
    CHECK(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
                             &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
                             &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::TransposeLayer>,
                             &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::ElementwiseBinaryLayer>,
                             &IsLayerOfType<armnn::MemCopyLayer>, &IsLayerOfType<armnn::FloorLayer>,
                             &IsLayerOfType<armnn::FakeQuantizationLayer>,
                             &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::ActivationLayer>,
                             &IsLayerOfType<armnn::OutputLayer>));

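    // The transposes produced by the optimization should be recorded as related to the
    // original transpose layer.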
    std::list<std::string> testRelatedLayers = { transposeLayerName };

    CHECK(CheckRelatedLayers<armnn::TransposeLayer>(graph, testRelatedLayers));
}

}