//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "QuantizeLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/ILayerVisitor.hpp>

namespace armnn
{

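// Constructs a QuantizeLayer with a single input slot and a single output slot.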
QuantizeLayer::QuantizeLayer(const char* name)
: Layer(1, 1, LayerType::Quantize, name)
{}

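// Builds a QuantizeQueueDescriptor and asks the workload factory to create the
// corresponding Quantize workload.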
std::unique_ptr<IWorkload> QuantizeLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    QuantizeQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    WorkloadInfo info = PrepInfoAndDesc(descriptor);

    return factory.CreateQuantize(descriptor, info);
}

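// Creates a copy of this layer in the given graph via CloneBase.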
Layer* QuantizeLayer::Clone(Graph& graph) const
{
    QuantizeLayer* clone = CloneBase<QuantizeLayer>(graph, GetName());
    return clone;
}

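// Verifies the layer's connections, infers the output shape from the connected
// input slot, and validates it against (or copies it to) the output slot's shape.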
void QuantizeLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QuantizeLayer");
}

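// Passes this layer to the visitor's VisitQuantizeLayer function.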
void QuantizeLayer::Accept(ILayerVisitor& visitor) const
{
    visitor.VisitQuantizeLayer(this, GetName());
}

} //namespace armnn