//
// Copyright © 2017,2019-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <TestUtils.hpp>

#include <Optimizer.hpp>

#include <doctest/doctest.h>

TEST_SUITE("Optimizer")
{
using namespace armnn::optimizations;

TEST_CASE("MovePermuteUpTest")
{
    const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
    const armnn::TensorInfo permuted({ 1, 3, 5, 2 }, armnn::DataType::Float32);

    armnn::Graph graph;

    armnn::LayerBindingId inputId = 0;

    armnn::Layer* head = graph.AddLayer<armnn::OutputLayer>(0, "output");

    std::string permuteLayerName = "original_permute";

    // Insert permute
    head = graph.InsertNewLayer<armnn::PermuteLayer>(head->GetInputSlot(0), armnn::PermuteDescriptor({ 0, 2, 3, 1 }),
                                                     permuteLayerName.c_str());

    head->GetOutputHandler().SetTensorInfo(permuted);

    // Inserts layers that don't care about data format.
    head = graph.InsertNewLayer<armnn::ActivationLayer>(head->GetInputSlot(0), armnn::ActivationDescriptor{}, "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::ElementwiseBinaryLayer>(head->GetInputSlot(0), armnn::BinaryOperation::Add, "");
    head->GetOutputHandler().SetTensorInfo(info);

    // Inserts input for 2nd input of Addition.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::FakeQuantizationLayer>(head->GetInputSlot(0),
                                                              armnn::FakeQuantizationDescriptor{}, "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::FloorLayer>(head->GetInputSlot(0), "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::MemCopyLayer>(head->GetInputSlot(0), "");
    head->GetOutputHandler().SetTensorInfo(info);

    head = graph.InsertNewLayer<armnn::ElementwiseBinaryLayer>(head->GetInputSlot(0), armnn::BinaryOperation::Mul, "");
    head->GetOutputHandler().SetTensorInfo(info);

    // Inserts input for 2nd input of Multiplication.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

    // Inserts input.
    graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(0), inputId++, "")
        ->GetOutputHandler()
        .SetTensorInfo(info);

    CHECK(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
                        &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
                        &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::MemCopyLayer>,
                        &IsLayerOfType<armnn::FloorLayer>, &IsLayerOfType<armnn::FakeQuantizationLayer>,
                        &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::ActivationLayer>,
                        &IsLayerOfType<armnn::PermuteLayer>, &IsLayerOfType<armnn::OutputLayer>));

    armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(MovePermuteUp()));

    // The permute is moved to the top. New permutes for layers with multiple inputs.
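    // Each of the three InputLayers should now feed a PermuteLayer: the original permute is moved up
    // through the format-agnostic chain, and additional permutes are inserted on the second inputs of
    // the Add and Mul ElementwiseBinary layers, so no permute remains in front of the OutputLayer.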
    CHECK(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
                        &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
                        &IsLayerOfType<armnn::PermuteLayer>, &IsLayerOfType<armnn::PermuteLayer>,
                        &IsLayerOfType<armnn::PermuteLayer>, &IsLayerOfType<armnn::ElementwiseBinaryLayer>,
                        &IsLayerOfType<armnn::MemCopyLayer>, &IsLayerOfType<armnn::FloorLayer>,
                        &IsLayerOfType<armnn::FakeQuantizationLayer>,
                        &IsLayerOfType<armnn::ElementwiseBinaryLayer>, &IsLayerOfType<armnn::ActivationLayer>,
                        &IsLayerOfType<armnn::OutputLayer>));

    std::list<std::string> testRelatedLayers = { permuteLayerName };

    CHECK(CheckRelatedLayers<armnn::PermuteLayer>(graph, testRelatedLayers));
}

}