//
// Copyright © 2019 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "StandInLayer.hpp"
#include "LayerCloneBase.hpp"

namespace armnn
{

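// The number of input and output slots of a stand-in layer is not fixed;
// both counts are taken directly from the descriptor supplied by the caller.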
StandInLayer::StandInLayer(const StandInDescriptor& param, const char* name)
    : LayerWithParameters(param.m_NumInputs, param.m_NumOutputs, LayerType::StandIn, param, name)
{
}

std::unique_ptr<IWorkload> StandInLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    IgnoreUnused(factory);
    // This throws in the event that it's called. We would expect that any backend that
    // "claims" to support the StandInLayer type would actually substitute it with a PrecompiledLayer
    // during graph optimization. There is no interface on the IWorkloadFactory to create a StandInWorkload.
    throw Exception("Stand in layer does not support creating workloads");
}

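// Creates a copy of this layer in the given graph, preserving the descriptor and the layer name.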
StandInLayer* StandInLayer::Clone(Graph& graph) const
{
    return CloneBase<StandInLayer>(graph, m_Param, GetName());
}

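// Output shapes cannot be deduced here: the framework knows nothing about the
// implementation a stand-in layer will eventually be replaced with, so this always throws.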
std::vector<TensorShape> StandInLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    IgnoreUnused(inputShapes);
    throw Exception("Stand in layer does not support inferring output shapes");
}

void StandInLayer::ValidateTensorShapesFromInputs()
{
    // This layer cannot be validated: the framework has no knowledge of the
    // implementation behind a stand-in layer, so do nothing here.
}

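// Part of the visitor pattern: passes this layer, its parameters and its name to the visitor.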
void StandInLayer::Accept(ILayerVisitor& visitor) const
{
    visitor.VisitStandInLayer(this, GetParameters(), GetName());
}
} // namespace armnn