//
// Copyright © 2019 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "LogSoftmaxLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/TypesUtils.hpp>

#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

namespace armnn
{
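// A LogSoftmax layer has exactly one input slot and one output slot; the
// descriptor and name are forwarded to the LayerWithParameters base class.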
LogSoftmaxLayer::LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name)
    : LayerWithParameters(1, 1, LayerType::LogSoftmax, param, name) {}

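// Fills in a LogSoftmaxQueueDescriptor (including any additional backend
// information) and asks the workload factory to create the backend-specific
// LogSoftmax workload.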
std::unique_ptr<IWorkload> LogSoftmaxLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    LogSoftmaxQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    return factory.CreateWorkload(LayerType::LogSoftmax, descriptor, PrepInfoAndDesc(descriptor));
}

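// Creates a copy of this layer in the given graph, preserving its parameters
// and name.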
LogSoftmaxLayer* LogSoftmaxLayer::Clone(Graph& graph) const
{
    return CloneBase<LogSoftmaxLayer>(graph, m_Param, GetName());
}

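// Verifies that the layer's single input is connected, infers the output shape
// from the connected input's shape, and validates (or copies) it onto the
// output slot according to the active shape inference method.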
void LogSoftmaxLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "LogSoftmaxLayer");
}

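// Visits this layer with the given strategy, passing its parameters and name;
// the empty initializer list indicates the layer holds no constant tensors.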
void LogSoftmaxLayer::ExecuteStrategy(IStrategy& strategy) const
{
    strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
}

} // namespace armnn