
Searched full:layer (Results 1 – 25 of 7374) sorted by relevance


/external/rust/crates/tower-layer/src/
tuple.rs
1 use crate::Layer;
3 impl<S> Layer<S> for () {
6 fn layer(&self, service: S) -> Self::Service {
11 impl<S, L1> Layer<S> for (L1,)
13 L1: Layer<S>,
17 fn layer(&self, service: S) -> Self::Service {
19 l1.layer(service)
23 impl<S, L1, L2> Layer<S> for (L1, L2)
25 L1: Layer<L2::Service>,
26 L2: Layer<S>,
[all …]
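The tuple impls above are what makes `Layer` stacks composable: `()` is the identity layer, and a tuple applies its members back to front, as the bounds `L1: Layer<L2::Service>, L2: Layer<S>` require. Below is a minimal self-contained sketch of the same pattern; the trait is re-declared locally so it compiles without the tower-layer crate, and the `LabelLayer` helper is invented here purely to make the nesting order visible.

// A local restatement of the Layer trait shown in tuple.rs above.
trait Layer<S> {
    type Service;
    fn layer(&self, service: S) -> Self::Service;
}

// `()` is the identity layer: it returns the service unchanged.
impl<S> Layer<S> for () {
    type Service = S;
    fn layer(&self, service: S) -> S {
        service
    }
}

// A pair applies its members back to front: `l2` wraps the service first,
// then `l1` wraps the result, exactly as the trait bounds require.
impl<S, L1, L2> Layer<S> for (L1, L2)
where
    L1: Layer<L2::Service>,
    L2: Layer<S>,
{
    type Service = L1::Service;
    fn layer(&self, service: S) -> Self::Service {
        let (l1, l2) = self;
        l1.layer(l2.layer(service))
    }
}

// An invented toy layer that wraps a service in a labelled struct,
// so the nesting order is observable.
struct Labelled<S>(&'static str, S);
struct LabelLayer(&'static str);

impl<S> Layer<S> for LabelLayer {
    type Service = Labelled<S>;
    fn layer(&self, service: S) -> Labelled<S> {
        Labelled(self.0, service)
    }
}

fn main() {
    // In `(outer, inner)`, `inner` is applied first, so `outer` ends up on top.
    let stack = (LabelLayer("outer"), LabelLayer("inner")).layer("svc");
    let Labelled(a, Labelled(b, s)) = stack;
    assert_eq!((a, b, s), ("outer", "inner", "svc"));
}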
/external/armnn/python/pyarmnn/src/pyarmnn/swig/modules/
armnn_network.i
35 of the layer and any mismatch is reported.
70 An input connection slot for a layer. Slot lifecycle is managed by the layer.
72 The input slot can be connected to an output slot of the preceding layer in the graph.
83 Returns output slot of a preceding layer that is connected to the given input slot.
86 IOutputSlot: Borrowed reference to an output connection slot for a preceding layer.
95 An output connection slot for a layer. Slot lifecycle is managed by the layer.
200 Calculates the index of this slot for the layer.
210 Returns the index of the layer. Same value as `IConnectableLayer.GetGuid`.
213 int: Layer id.
234 Interface for a layer that is connectable to other layers via `IInputSlot` and `IOutputSlot`.
[all …]
/external/tensorflow/tensorflow/python/keras/mixed_precision/
layer_test.py
15 """Tests keras.layers.Layer works properly with mixed precision."""
98 layer = mp_test_util.MultiplyLayer(assert_type=dtype)
99 self.assertEqual(layer.dtype, dtypes.float32)
100 self.assertEqual(get_layer_policy.get_layer_policy(layer).name,
102 y = layer(x)
103 self.assertEqual(layer.v.dtype, dtypes.float32)
105 self.assertEqual(layer.dtype_policy.name, policy_name)
106 self.assertIsInstance(layer.dtype_policy, policy.Policy)
107 self.assertEqual(layer.compute_dtype, dtype)
108 self.assertEqual(layer.dtype, dtypes.float32)
[all …]
/external/armnn/include/armnn/
INetwork.hpp
22 /// @brief An input connection slot for a layer.
23 /// The input slot can be connected to an output slot of the preceding layer in the graph.
39 /// @brief An output connection slot for a layer.
67 /// @brief Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
71 /// Returns the name of the layer
95 /// Returns the unique id of the layer
98 /// Apply a visitor to this layer
101 /// Provide a hint for the optimizer as to which backend to prefer for this layer.
102 /// By providing a BackendSelectionHint there is no guarantee the input backend supports that layer.
105 /// layer (IsLayerSupported returns true for a specific backend).
[all …]
/external/tensorflow/tensorflow/python/keras/saving/saved_model/
save_impl.py
71 def should_skip_serialization(layer):
72 """Skip serializing extra objects and functions if layer inputs aren't set."""
73 saved_model_input_spec_set = (isinstance(layer, training_lib.Model) and
74 layer._saved_model_inputs_spec is not None)  # pylint: disable=protected-access
75 if not layer.built and not saved_model_input_spec_set:
76 logging.warning('Skipping full serialization of Keras layer {}, because '
77 'it is not built.'.format(layer))
82 def wrap_layer_objects(layer, serialization_cache):
83 """Returns extra trackable objects to attach to the serialized layer.
86 layer: Keras Layer object.
[all …]
/external/armnn/src/armnnTestUtils/
CreateWorkload.hpp
32 // Calls CreateWorkload for a layer, and checks the returned pointer is of the correct type.
34 std::unique_ptr<Workload> MakeAndCheckWorkload(Layer& layer,
38 std::unique_ptr<IWorkload> workload = layer.CreateWorkload(factory);
42 layer.SetBackendId(factory.GetBackendId());
43 CHECK(factory.IsLayerSupported(layer, layer.GetDataType(), reasonIfUnsupported, modelOptions));
52 for (auto&& layer : graph.TopologicalSort())
54 layer->CreateTensorHandles(tmpRegistry, factory);
69 // Creates the layer we're testing.
75 ActivationLayer* const layer = graph.AddLayer<ActivationLayer>(layerDesc, "layer");
78 Layer* const input = graph.AddLayer<InputLayer>(0, "input");
[all …]
MockBackend.cpp
14 #include "Layer.hpp"
62 bool IsLayerSupported(const armnn::Layer* layer)
64 ARMNN_ASSERT(layer != nullptr);
66 armnn::LayerType layerType = layer->GetType();
75 // Layer supported
78 // Layer unsupported
83 bool IsLayerSupported(const armnn::Layer& layer)
85 return IsLayerSupported(&layer);
88 bool IsLayerOptimizable(const armnn::Layer* layer)
90 ARMNN_ASSERT(layer != nullptr);
[all …]
/external/armnn/src/armnnSerializer/
Serializer.hpp
25 void ExecuteStrategy(const armnn::IConnectableLayer* layer,
57 /// Creates the Input Slots and Output Slots and LayerBase for the layer.
59 const armnn::IConnectableLayer* layer,
62 /// Creates the serializer AnyLayer for the layer and adds it to m_serializedLayers.
63 …void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnnSerializer::Layer serialize…
78 /// Creates the serializer InputSlots for the layer.
80 const armnn::IConnectableLayer* layer);
82 /// Creates the serializer OutputSlots for the layer.
84 const armnn::IConnectableLayer* layer);
101 /// layer within our FlatBuffer index.
[all …]
Serializer.cpp
114 // Build FlatBuffer for Input Layer
115 void SerializerStrategy::SerializeInputLayer(const armnn::IConnectableLayer* layer, LayerBindingId …
120 auto flatBufferInputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Input);
126 // Push layer binding id to outputIds.
133 CreateAnyLayer(flatBufferInputLayer.o, serializer::Layer::Layer_InputLayer);
136 // Build FlatBuffer for Output Layer
137 void SerializerStrategy::SerializeOutputLayer(const armnn::IConnectableLayer* layer,
143 … auto flatBufferOutputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Output);
149 // Push layer binding id to outputIds.
155 CreateAnyLayer(flatBufferOutputLayer.o, serializer::Layer::Layer_OutputLayer);
[all …]
/external/rust/crates/tracing-subscriber/src/layer/
layered.rs
5 layer::{Context, Layer},
17 /// [`Layer`]s.
19 /// [`Layer`]: crate::Layer
23 /// The layer.
24 layer: L,
26 /// The inner value that `self.layer` was layered onto.
28 /// If this is also a `Layer`, then this `Layered` will implement `Layer`.
34 // level hints when per-layer filters are in use.
41 /// Does `self.layer` have per-layer filters?
46 /// `Layered`s have per-layer filters.
[all …]
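`Layered` is the type produced each time `SubscriberExt::with` stacks one layer onto a subscriber, and the per-layer-filter bookkeeping above is what lets each layer see only the events its own filter admits. A short sketch of that composition, assuming tracing-subscriber's default `registry` and `fmt` features; the two layers and their levels are illustrative choices, not taken from the snippet.

use tracing_subscriber::{filter::LevelFilter, fmt, prelude::*};

fn main() {
    // Each `.with(...)` wraps the previous subscriber in another `Layered`;
    // `with_filter` attaches a per-layer filter to each fmt layer.
    tracing_subscriber::registry()
        .with(fmt::layer().with_filter(LevelFilter::WARN))
        .with(fmt::layer().compact().with_filter(LevelFilter::INFO))
        .init();

    tracing::warn!("seen by both layers");
    tracing::info!("seen only by the INFO-filtered layer");
}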
mod.rs
1 //! The [`Layer`] trait, a composable abstraction for building [`Subscriber`]s.
18 //! span IDs. The [`Layer`] trait represents this composable subset of the
24 //! Since a [`Layer`] does not implement a complete strategy for collecting
26 //! [`Layer`] trait is generic over a type parameter (called `S` in the trait
28 //! with. Thus, a [`Layer`] may be implemented that will only compose with a
30 //! added to constrain what types implementing `Subscriber` a `Layer` can wrap.
32 //! `Layer`s may be added to a `Subscriber` by using the [`SubscriberExt::with`]
35 //! `Layer` with the `Subscriber`.
39 //! use tracing_subscriber::Layer;
47 //! impl<S: Subscriber> Layer<S> for MyLayer {
[all …]
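The module docs above break off at a `MyLayer` impl; filled out, that doc example might look as follows. The `on_event` body is an invented illustration, and everything else leans on the trait's default no-op methods.

use tracing::{Event, Subscriber};
use tracing_subscriber::{layer::{Context, Layer}, prelude::*, registry::Registry};

struct MyLayer;

impl<S: Subscriber> Layer<S> for MyLayer {
    // Override only the callback we care about; the rest default to no-ops.
    fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
        println!("MyLayer saw an event: {}", event.metadata().name());
    }
}

fn main() {
    // `with` comes from SubscriberExt, composing MyLayer with a base subscriber.
    let subscriber = Registry::default().with(MyLayer);
    tracing::subscriber::set_global_default(subscriber)
        .expect("failed to install subscriber");

    tracing::info!(answer = 42, "an event for MyLayer");
}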
/external/tensorflow/tensorflow/python/keras/engine/
functional.py
95 # The key of _layer_call_argspecs is a layer. tf.Module._flatten will fail to
96 # flatten the key since it is trying to convert Trackable/Layer to a string.
120 # layer is added or removed.
182 layer, node_index, tensor_index = x._keras_history # pylint: disable=protected-access
183 self._output_layers.append(layer)
184 self._output_coordinates.append((layer, node_index, tensor_index))
188 layer, node_index, tensor_index = x._keras_history # pylint: disable=protected-access
189 # It's supposed to be an input layer, so only one node
193 self._input_layers.append(layer)
194 self._input_coordinates.append((layer, node_index, tensor_index))
[all …]
sequential.py
55 >>> # Optionally, the first layer can receive an `input_shape` argument:
125 # created. It is false when there isn't any layer, or the layers doesn't
139 for layer in layers:
140 self.add(layer)
155 def add(self, layer):
156 """Adds a layer instance on top of the layer stack.
159 layer: layer instance.
162 TypeError: If `layer` is not a layer instance.
163 ValueError: In case the `layer` argument does not
165 ValueError: In case the `layer` argument has
[all …]
base_layer.py
16 """Contains the base Layer class, from which all layers inherit."""
88 # Prefix that is added to the TF op layer names.
97 @keras_export('keras.layers.Layer')
98 class Layer(module.Module, version_utils.LayerVersionSelector):
101 A layer is a callable object that takes as input one or more tensors and
106 Users will just instantiate a layer and then treat it as a callable.
109 trainable: Boolean, whether the layer's variables should be trainable.
110 name: String name of the layer.
111 dtype: The dtype of the layer's computations and weights. Can also be a
116 dynamic: Set this to `True` if your layer should only be run eagerly, and
[all …]
/external/armnn/src/armnn/
Graph.cpp
33 std::unordered_map<const Layer*, Layer*> otherToClonedMap;
37 Layer* const layer = otherLayer->Clone(*this);
38 otherToClonedMap.emplace(otherLayer, layer);
44 Layer* const thisLayer = otherToClonedMap[otherLayer];
51 const Layer& otherTgtLayer = otherInputSlot->GetOwningLayer();
52 Layer* const thisTgtLayer = otherToClonedMap[&otherTgtLayer];
100 const armnn::Layer *layer = it;
102 auto outputTensorShape = layer->GetOutputSlots()[i].GetTensorInfo().GetShape();
142 for (auto&& layer : m_Layers)
144 DotNode node(stream, layer->GetGuid(), GetLayerTypeAsCString(layer->GetType()));
[all …]
Network.cpp
8 #include "Layer.hpp"
757 const Layer* layer,
762 failureMsg << "Layer of type " << GetLayerTypeAsCString(layer->GetType())
771 bool CheckScaleSetOnQuantizedType(Layer* layer, Optional<std::vector<std::string>&> errMessages)
774 unsigned int numOutputs = layer->GetNumOutputSlots();
776 OutputSlot& outputSlot = layer->GetOutputSlot(i);
784 ss << "output " << i << " of layer " << GetLayerTypeAsCString(layer->GetType())
785 << " (" << layer->GetNameStr() << ") is of type"
792 layer->GetType() == armnn::LayerType::Softmax)
795 ss << "Quantization parameters for Softmax layer (Scale: " <<
/external/armnn/src/armnn/test/
ConstTensorLayerVisitor.hpp
31 void ExecuteStrategy(const armnn::IConnectableLayer* layer,
38 switch (layer->GetType())
42 CheckLayerPointer(layer);
49 m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
72 void ExecuteStrategy(const armnn::IConnectableLayer* layer,
79 switch (layer->GetType())
83 CheckLayerPointer(layer);
90 m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
113 void ExecuteStrategy(const armnn::IConnectableLayer* layer,
120 switch (layer->GetType())
[all …]
OptimizerTests.cpp
42 LstmLayer* const layer = graph.AddLayer<LstmLayer>(layerDesc, "layer");
48 layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<ScopedTensorHandle>
50 layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<ScopedTensorHandle>
52 layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<ScopedTensorHandle>
54 layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<ScopedTensorHandle>
56 layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<ScopedTensorHandle>
58 layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<ScopedTensorHandle>
60 layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<ScopedTensorHandle>
62 layer->m_BasicParameters.m_CellBias = std::make_unique<ScopedTensorHandle>
64 layer->m_BasicParameters.m_OutputGateBias = std::make_unique<ScopedTensorHandle>
[all …]
/external/rust/crates/tracing-subscriber/src/
reload.rs
1 //! Wrapper for a `Layer` to allow it to be dynamically reloaded.
3 //! This module provides a [`Layer` type] implementing the [`Layer` trait] or [`Filter` trait]
8 //! This can be used in cases where a subset of `Layer` or `Filter` functionality
10 //! change at runtime. Note that this layer introduces a (relatively small)
15 //! Reloading a [global filtering](crate::layer#global-filtering) layer:
21 //! let (filter, reload_handle) = reload::Layer::new(filter);
24 //! .with(fmt::Layer::default())
35 //! Reloading a [`Filtered`](crate::filter::Filtered) layer:
40 //! let filtered_layer = fmt::Layer::default().with_filter(filter::LevelFilter::WARN);
41 //! let (filtered_layer, reload_handle) = reload::Layer::new(filtered_layer);
[all …]
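Assembled into one program, the doc fragments above yield a reloadable global filter along these lines; the final `modify` call mirrors the handle usage the module docs describe, and exact bounds vary with the tracing-subscriber version.

use tracing_subscriber::{filter, fmt, prelude::*, reload};

fn main() {
    // Wrap the filter in a reload::Layer, keeping the handle for later.
    let filter = filter::LevelFilter::WARN;
    let (filter, reload_handle) = reload::Layer::new(filter);
    tracing_subscriber::registry()
        .with(filter)
        .with(fmt::Layer::default())
        .init();

    tracing::info!("filtered out while the level is WARN");

    // Swap the global filter at runtime through the handle.
    reload_handle
        .modify(|filter| *filter = filter::LevelFilter::INFO)
        .expect("failed to reload filter");

    tracing::info!("now visible at INFO");
}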
/external/tensorflow/tensorflow/python/keras/utils/
vis_utils.py
54 def is_wrapped_model(layer):
57 return (isinstance(layer, wrappers.Wrapper) and
58 isinstance(layer.layer, functional.Functional))
80 show_dtype: whether to display layer dtypes.
81 show_layer_names: whether to display layer names.
143 for i, layer in enumerate(layers):
144 layer_id = str(id(layer))
146 # Append a wrapped layer's label to node's label, if it exists.
147 layer_name = layer.name
148 class_name = layer.__class__.__name__
[all …]
/external/libavc/decoder/svc/
isvcd_resamp_svc.h
122 current layer. can be used to store
127 current layer. can be used to store
136 layer with respect to upper left luma
137 sample of current layer.
141 layer
145 reference layer with respect to bottom
150 reference layer
158 current layer. can be used to store
162 phase for a pixel in current layer.
172 for each MB in current layer.can be
[all …]
/external/armnn/src/armnn/optimizations/
ConvertFp32NetworkToFp16.hpp
18 void Run(Graph& graph, Layer& layer) const
20 if(layer.GetType() == LayerType::Input)
22 // if the outputs of this layer are DataType::Float32
23 // add a ConvertFloat32ToFloat16 layer after each of the outputs
24 if (layer.GetDataType() == DataType::Float32)
26 InsertConvertFp32ToFp16LayersAfter(graph, layer);
29 else if (layer.GetType() == LayerType::Output)
31 // For DetectionPostProcess Layer output is always Float32 regardless of input type
32 Layer& connectedLayer = layer.GetInputSlots()[0].GetConnectedOutputSlot()->GetOwningLayer();
35 // if the inputs of this layer are DataType::Float32
[all …]
/external/skia/tools/sk_app/
Window.cpp
27 void Window::visitLayers(const std::function<void(Layer*)>& visitor) {
35 bool Window::signalLayers(const std::function<bool(Layer*)>& visitor) {
45 this->visitLayers([](Layer* layer) { layer->onBackendCreated(); });
49 return this->signalLayers([=](Layer* layer) { return layer->onChar(c, modifiers); });
53 return this->signalLayers([=](Layer* layer) { return layer->onKey(key, state, modifiers); });
57 return this->signalLayers([=](Layer* layer) { return layer->onMouse(x, y, state, modifiers); });
62 [=](Layer* layer) { return layer->onMouseWheel(delta, x, y, modifiers); });
66 return this->signalLayers([=](Layer* layer) { return layer->onTouch(owner, state, x, y); });
70 return this->signalLayers([=](Layer* layer) { return layer->onFling(state); });
74 return this->signalLayers([=](Layer* layer) { return layer->onPinch(state, scale, x, y); });
[all …]
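Window.cpp above routes every event through one of two helpers: visitLayers broadcasts to every active layer, while signalLayers stops at the first layer that reports the event handled (the bool return in the lambdas). A hypothetical Rust analog of that split; the trait shape, the `active` flag, and the top-down iteration order are assumptions for illustration, not taken from the Skia source.

// Layers opt in via `active` and report whether they consumed an event.
trait Layer {
    fn active(&self) -> bool {
        true
    }
    fn on_char(&mut self, c: char) -> bool;
}

struct Window {
    layers: Vec<Box<dyn Layer>>,
}

impl Window {
    // Broadcast: notify every active layer, ignoring return values.
    fn visit_layers(&mut self, mut visitor: impl FnMut(&mut dyn Layer)) {
        for layer in &mut self.layers {
            if layer.active() {
                visitor(layer.as_mut());
            }
        }
    }

    // Signal: walk the stack top-down, stopping at the first handler.
    fn signal_layers(&mut self, mut visitor: impl FnMut(&mut dyn Layer) -> bool) -> bool {
        for layer in self.layers.iter_mut().rev() {
            if layer.active() && visitor(layer.as_mut()) {
                return true;
            }
        }
        false
    }

    fn on_char(&mut self, c: char) -> bool {
        self.signal_layers(|layer| layer.on_char(c))
    }
}

fn main() {
    struct Echo;
    impl Layer for Echo {
        fn on_char(&mut self, c: char) -> bool {
            println!("Echo handled {c:?}");
            true
        }
    }

    let mut window = Window { layers: vec![Box::new(Echo)] };
    assert!(window.on_char('x'));
    window.visit_layers(|_| ());
}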
/external/armnn/src/backends/backendsCommon/
WorkloadFactory.cpp
6 #include <Layer.hpp>
26 using LayerList = std::list<Layer*>;
78 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
96 switch(layer.GetType())
100 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
101 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
102 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
113 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
114 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
115 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
[all …]
/external/mesa3d/src/gallium/auxiliary/vl/
vl_compositor.c
284 default_rect(struct vl_compositor_layer *layer)
286 struct pipe_resource *res = layer->sampler_views[0]->texture;
306 calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
311 layer->src.tl = calc_topleft(size, src);
312 layer->src.br = calc_bottomright(size, src);
313 layer->dst.tl = calc_topleft(size, dst);
314 layer->dst.br = calc_bottomright(size, dst);
315 layer->zw.x = 0.0f;
316 layer->zw.y = size.y;
321 unsigned layer, struct pipe_video_buffer *buffer,
[all …]
