
Searched full:onnx (Results 1 – 25 of 527) sorted by relevance


/external/pytorch/third_party/
onnx.BUILD
7 srcs = ["onnx/gen_proto.py"],
9 "onnx/onnx.in.proto",
10 "onnx/onnx-operators.in.proto",
11 "onnx/onnx-data.in.proto",
18 "onnx/onnx_onnx_torch-ml.proto",
19 "onnx/onnx-ml.pb.h",
21 … onnx_torch -o $(@D)/onnx onnx -m >/dev/null && sed -i 's/onnx_onnx_torch-ml.pb.h/onnx\\/onnx_onnx…
28 "onnx/onnx-operators_onnx_torch-ml.proto",
29 "onnx/onnx-operators-ml.pb.h",
31 …$(@D)/onnx onnx-operators -m >/dev/null && sed -i 's/onnx-operators_onnx_torch-ml.pb.h/onnx\\/onnx
[all …]
/external/pytorch/test/onnx/expect/
TestOperators.test_meshgrid_indexing.expect
6 output: "onnx::Reshape_3"
20 input: "onnx::Reshape_1"
21 input: "onnx::Reshape_3"
22 output: "onnx::Shape_4"
27 output: "onnx::Reshape_5"
41 input: "onnx::Reshape_0"
42 input: "onnx::Reshape_5"
43 output: "onnx::Shape_6"
48 output: "onnx::Reshape_7"
62 input: "onnx::Reshape_2"
[all …]
TestOperators.test_meshgrid.expect
6 output: "onnx::Reshape_3"
20 input: "onnx::Reshape_0"
21 input: "onnx::Reshape_3"
22 output: "onnx::Shape_4"
32 output: "onnx::Reshape_5"
46 input: "onnx::Reshape_1"
47 input: "onnx::Reshape_5"
48 output: "onnx::Shape_6"
58 output: "onnx::Reshape_7"
72 input: "onnx::Reshape_2"
[all …]
TestOperators.test_chunk.expect
6 input: "onnx::Shape_0"
7 output: "onnx::Gather_1"
12 output: "onnx::Gather_2"
26 input: "onnx::Gather_1"
27 input: "onnx::Gather_2"
28 output: "onnx::Add_3"
38 output: "onnx::Slice_4"
52 output: "onnx::Add_5"
66 input: "onnx::Add_3"
67 input: "onnx::Add_5"
[all …]
TestOperators.test_std.expect
6 input: "onnx::ReduceMean_0"
7 output: "onnx::Sub_1"
23 input: "onnx::ReduceMean_0"
24 output: "onnx::Gather_2"
29 output: "onnx::Gather_3"
43 input: "onnx::Gather_2"
44 input: "onnx::Gather_3"
45 output: "onnx::ReduceProd_4"
55 input: "onnx::ReduceProd_4"
56 output: "onnx::Cast_5"
[all …]
TestOperators.test_shape_value_map.expect
7 output: "onnx::Gather_1"
12 output: "onnx::Gather_2"
25 input: "onnx::Gather_1"
26 input: "onnx::Gather_2"
27 output: "onnx::Unsqueeze_3"
37 output: "onnx::Unsqueeze_7"
51 input: "onnx::Unsqueeze_3"
52 input: "onnx::Unsqueeze_7"
53 output: "onnx::Concat_8"
58 output: "onnx::Concat_25"
[all …]
TestOperators.test_unfold.expect
6 output: "onnx::Slice_1"
20 output: "onnx::Slice_2"
34 output: "onnx::Slice_3"
48 input: "onnx::Slice_0"
49 input: "onnx::Slice_2"
50 input: "onnx::Slice_3"
51 input: "onnx::Slice_1"
52 output: "onnx::Unsqueeze_4"
57 output: "onnx::Slice_5"
71 output: "onnx::Slice_6"
[all …]
TestOperators.test_pad.expect
6 output: "onnx::ConstantOfShape_27"
20 output: "onnx::Concat_28"
34 input: "onnx::ConstantOfShape_27"
35 output: "onnx::Concat_10"
49 input: "onnx::Concat_28"
50 input: "onnx::Concat_10"
51 output: "onnx::Reshape_11"
61 output: "onnx::Reshape_12"
75 input: "onnx::Reshape_11"
76 input: "onnx::Reshape_12"
[all …]
TestOperators.test_embedding_bags.expect
6 output: "onnx::Loop_33"
34 output: "onnx::Gather_6"
39 output: "onnx::Gather_7"
52 input: "onnx::Gather_6"
53 input: "onnx::Gather_7"
54 output: "onnx::Unsqueeze_8"
64 output: "onnx::Unsqueeze_9"
78 input: "onnx::Unsqueeze_8"
79 input: "onnx::Unsqueeze_9"
80 output: "onnx::Concat_10"
[all …]
TestOperators.test_expand.expect
6 output: "onnx::ConstantOfShape_11"
20 input: "onnx::ConstantOfShape_11"
21 output: "onnx::Mul_3"
35 output: "onnx::Mul_4"
48 input: "onnx::Mul_3"
49 input: "onnx::Mul_4"
50 output: "onnx::Equal_5"
55 output: "onnx::Equal_6"
69 input: "onnx::Equal_6"
70 input: "onnx::Equal_5"
[all …]
TestOperators.test_gelu.expect
6 output: "onnx::Div_1"
19 input: "onnx::Div_0"
20 input: "onnx::Div_1"
21 output: "onnx::Erf_2"
26 input: "onnx::Erf_2"
27 output: "onnx::Add_3"
32 output: "onnx::Add_4"
45 input: "onnx::Add_3"
46 input: "onnx::Add_4"
47 output: "onnx::Mul_5"
[all …]
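
These .expect files record the ONNX graphs the exporter tests expect for single operators; the test_gelu entry above, for instance, captures GELU being lowered to Div/Erf/Add/Mul nodes. A minimal sketch of producing and inspecting such a graph yourself, assuming the torch and onnx packages are installed (the file name is illustrative):

# Sketch: export one operator and list the ONNX node types it lowers to.
# The output typically includes Div, Erf, Add and Mul for GELU, though the
# exact lowering depends on the opset and exporter version.
import torch
import onnx

torch.onnx.export(torch.nn.GELU(), torch.randn(2, 3), "gelu.onnx")
print([node.op_type for node in onnx.load("gelu.onnx").graph.node])
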
/external/pytorch/torch/onnx/_internal/diagnostics/infra/sarif/
__init__.py
4 from torch.onnx._internal.diagnostics.infra.sarif._address import Address
5 from torch.onnx._internal.diagnostics.infra.sarif._artifact import Artifact
6 from torch.onnx._internal.diagnostics.infra.sarif._artifact_change import ArtifactChange
7 from torch.onnx._internal.diagnostics.infra.sarif._artifact_content import (
10 from torch.onnx._internal.diagnostics.infra.sarif._artifact_location import (
13 from torch.onnx._internal.diagnostics.infra.sarif._attachment import Attachment
14 from torch.onnx._internal.diagnostics.infra.sarif._code_flow import CodeFlow
15 from torch.onnx._internal.diagnostics.infra.sarif._configuration_override import (
18 from torch.onnx._internal.diagnostics.infra.sarif._conversion import Conversion
19 from torch.onnx._internal.diagnostics.infra.sarif._edge import Edge
[all …]
/external/armnn/docs/
05_01_parsers.dox
13 …e model you want to run e.g. If you've got a model in onnx format (<model_name>.onnx) use our onnx…
25 @section S5_onnx_parser Arm NN Onnx Parser
27 `armnnOnnxParser` is a library for loading neural networks defined in ONNX protobuf files into the …
29 ## ONNX operators that the Arm NN SDK supports
31 This reference guide provides a list of ONNX operators the Arm NN SDK currently supports.
33 The Arm NN SDK ONNX parser currently only supports fp32 operators.
38 …- See the ONNX [Add documentation](https://github.com/onnx/onnx/blob/master/docs/Operators.md#Add)…
41 …- See the ONNX [AveragePool documentation](https://github.com/onnx/onnx/blob/master/docs/Operators…
44 …- See the ONNX [Concat documentation](https://github.com/onnx/onnx/blob/master/docs/Operators.md#C…
47 …- See the ONNX [Constant documentation](https://github.com/onnx/onnx/blob/master/docs/Operators.md…
[all …]
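
Since the Arm NN ONNX parser supports only a subset of operators (and only fp32), it can be worth listing the operator types a model actually uses before handing it to armnnOnnxParser. A minimal sketch with the onnx Python package, assuming an illustrative model path:

# Sketch: list the ONNX operator types used by a model, e.g. to compare
# against the operators documented as supported by the Arm NN ONNX parser.
import onnx

model = onnx.load("model.onnx")  # illustrative path
print(sorted({node.op_type for node in model.graph.node}))
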
/external/pytorch/aten/src/ATen/core/
interned_strings.h
16 _(namespaces, onnx) \
39 _(prim, Expand) /* onnx */ \
68 _(prim, PackPadded) /* onnx */ \
69 _(prim, PadPacked) /* onnx */ \
233 _(onnx, Add) \
234 _(onnx, Concat) \
235 _(onnx, Constant) \
236 _(onnx, ConstantFill) \
237 _(onnx, Div) \
238 _(onnx, GRU) \
[all …]
/external/pytorch/docs/source/
onnx.rst
1 torch.onnx
7 `Open Neural Network eXchange (ONNX) <https://onnx.ai/>`_ is an open standard
8 format for representing machine learning models. The ``torch.onnx`` module captures the computation…
10 `ONNX graph <https://github.com/onnx/onnx/blob/main/docs/IR.md>`_.
13 `runtimes that support ONNX <https://onnx.ai/supported-tools.html#deployModel>`_, including
14 Microsoft's `ONNX Runtime <https://www.onnxruntime.ai>`_.
16 **There are two flavors of ONNX exporter API that you can use, as listed below.**
17 Both can be called through function :func:`torch.onnx.export`.
36 torch.onnx.export(
39 "my_model.onnx", # filename of the ONNX model
[all …]
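
The snippet above is from the TorchScript-based flavor, where torch.onnx.export traces the model with an example input and writes the ONNX file directly. A minimal sketch of that call, with an illustrative model and shapes:

# Sketch: TorchScript-based export through torch.onnx.export; the model,
# shapes and file name here are illustrative.
import torch

model = torch.nn.Linear(4, 2)
dummy_input = torch.randn(1, 4)

torch.onnx.export(
    model,             # model to export
    dummy_input,       # example input used for tracing
    "my_model.onnx",   # filename of the ONNX model
    input_names=["input"],
    output_names=["output"],
)
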
onnx_dynamo.rst
1 TorchDynamo-based ONNX Exporter
4 .. automodule:: torch.onnx
11 The ONNX exporter for TorchDynamo is a rapidly evolving beta technology.
16 The ONNX exporter leverages TorchDynamo engine to hook into Python's frame evaluation API
18 The resulting FX Graph is then polished before it is finally translated into an ONNX graph.
25 - **ONNX Exporter**: :class:`Exporter` main class that orchestrates the export process.
26 …- **ONNX Export Options**: :class:`ExportOptions` has a set of options that control the export pro…
27 - **ONNX Registry**: :class:`OnnxRegistry` is the registry of ONNX operators and functions.
30 …- **ONNX Program**: :class:`ONNXProgram` is the output of the exporter that contains the exported
31 …- **ONNX Diagnostic Options**: :class:`DiagnosticOptions` has a set of options that control the di…
[all …]
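
For the TorchDynamo-based flavor, the classes listed above fit together roughly as follows: dynamo_export captures the model through Dynamo and returns an ONNXProgram, optionally configured by ExportOptions. A minimal sketch, assuming a PyTorch build where torch.onnx.dynamo_export is available (it is documented as a beta API):

# Sketch of the TorchDynamo-based export flow; torch.onnx.dynamo_export is a
# beta API and may change between releases.
import torch

model = torch.nn.Linear(4, 2)
example_input = torch.randn(1, 4)

options = torch.onnx.ExportOptions(dynamic_shapes=True)
onnx_program = torch.onnx.dynamo_export(model, example_input, export_options=options)
onnx_program.save("my_model.onnx")
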
/external/pytorch/torch/onnx/_internal/diagnostics/
rules.yaml
1 # PyTorch ONNX Exporter (POE) Rules are based on sarif ReportingDescriptor format.
3 ## Rules for PyTorch (TorchScript based) ONNX Exporter (POE)
6 name: node-missing-onnx-shape-inference
8 text: Node is missing ONNX shape inference.
10 text: "Node is missing ONNX shape inference.
11 This usually happens when the node is not valid under standard ONNX operator spec."
13 Node is missing ONNX shape inference.
14 This usually happens when the node is not valid under standard ONNX operator spec.
27 text: Missing symbolic function for custom PyTorch operator, cannot translate node to ONNX.
29 text: Missing symbolic function for custom PyTorch operator, cannot translate node to ONNX.
[all …]
_rules.py
5 See tools/onnx/gen_diagnostics.py for more information.
7 Diagnostic rules for PyTorch ONNX export.
14 from torch.onnx._internal.diagnostics import infra
25 """Node is missing ONNX shape inference."""
45 """Missing symbolic function for custom PyTorch operator, cannot translate node to ONNX."""
50 …Message template: 'ONNX export failed on an operator with unrecognized namespace {op_name}. If you…
59 …Message template: 'ONNX export failed on an operator with unrecognized namespace {op_name}. If you…
65 """Missing symbolic function for standard PyTorch operator, cannot translate node to ONNX."""
72 …Message template: "Exporting the operator '{op_name}' to ONNX opset version {opset_version} is not…
83 …Message template: "Exporting the operator '{op_name}' to ONNX opset version {opset_version} is not…
[all …]
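
Several of these diagnostics fire when the exporter finds no symbolic function for an operator; for custom operators the usual fix is to register one before exporting. A minimal sketch, with an illustrative custom op name:

# Sketch: register a symbolic function so the TorchScript-based exporter can
# translate a custom op to ONNX. The op name "mylib::my_relu" is illustrative.
import torch
from torch.onnx import register_custom_op_symbolic

def my_relu_symbolic(g, input):
    # Map the custom op onto a standard ONNX Relu node.
    return g.op("Relu", input)

register_custom_op_symbolic("mylib::my_relu", my_relu_symbolic, opset_version=17)
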
/external/pytorch/torch/onnx/_internal/fx/
serialization.py
10 from torch.onnx import _type_utils as jit_type_utils
14 import onnx
24 dtype_override: onnx.TypeProto | None = None, # type: ignore[name-defined]
25 ) -> onnx.TensorProto: # type: ignore[name-defined]
31 name: Name of the tensor (i.e., initializer name in ONNX graph).
33 (e.g., "/tmp/initializers/weight_0" when model is "/tmp/model_name.onnx").
37 Reference for ONNX's external data format:
39 …https://github.com/onnx/onnx/blob/5dac81ac0707bdf88f56c35c0a5e8855d3534673/onnx/external_data_help…
41 …https://github.com/onnx/onnx/blob/5dac81ac0707bdf88f56c35c0a5e8855d3534673/onnx/external_data_help…
42 How to set ONNX fields?
[all …]
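
The serialization helpers above target ONNX's external data format, where large initializers live in files next to the .onnx model rather than inside the proto. A minimal sketch of the same idea using the public onnx Python API rather than these internal torch.onnx helpers (paths are illustrative):

# Sketch: re-save an ONNX model with its large tensors stored as external data,
# using the public onnx API. File names are illustrative.
import onnx

model = onnx.load("model.onnx")
onnx.save_model(
    model,
    "model_external.onnx",
    save_as_external_data=True,      # move tensors out of the proto
    all_tensors_to_one_file=True,
    location="model_external.onnx.data",
)
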
/external/pytorch/torch/csrc/onnx/
init.cpp
1 #include <onnx/onnx_pb.h>
2 #include <torch/csrc/onnx/back_compat.h>
3 #include <torch/csrc/onnx/init.h>
4 #include <torch/csrc/onnx/onnx.h>
8 #include <torch/csrc/jit/passes/onnx.h>
9 #include <torch/csrc/jit/passes/onnx/cast_all_constant_to_floating.h>
10 #include <torch/csrc/jit/passes/onnx/constant_fold.h>
11 #include <torch/csrc/jit/passes/onnx/deduplicate_initializers.h>
12 #include <torch/csrc/jit/passes/onnx/eliminate_unused_items.h>
13 #include <torch/csrc/jit/passes/onnx/eval_peephole.h>
[all …]
/external/pytorch/torch/onnx/
__init__.py
119 DiagnosticOptions.__module__ = "torch.onnx"
120 ExportOptions.__module__ = "torch.onnx"
121 ExportTypes.__module__ = "torch.onnx"
122 JitScalarType.__module__ = "torch.onnx"
123 ONNXProgram.__module__ = "torch.onnx"
124 ONNXRuntimeOptions.__module__ = "torch.onnx"
125 OnnxExporterError.__module__ = "torch.onnx"
126 OnnxRegistry.__module__ = "torch.onnx"
127 _OrtBackend.__module__ = "torch.onnx"
128 _OrtBackendOptions.__module__ = "torch.onnx"
[all …]
/external/armnn/src/armnnOnnxParser/
OnnxParser.hpp
11 #include <onnx/onnx.pb.h>
23 using ModelPtr = std::unique_ptr<onnx::ModelProto>;
28 using OperationParsingFunction = void(OnnxParserImpl::*)(const onnx::NodeProto& NodeProto);
32 using GraphPtr = std::unique_ptr<onnx::GraphProto>;
91 armnn::INetworkPtr CreateNetworkFromModel(onnx::ModelProto& model);
96 void SetupInfo(const google::protobuf::RepeatedPtrField<onnx::ValueInfoProto >* list);
102 const onnx::TensorProto::DataType& type = onnx::TensorProto::FLOAT);
107 void GetInputAndParam(const onnx::NodeProto& node,
119 …void AddConvLayerWithDepthwiseConv(const onnx::NodeProto& node, const armnn::Convolution2dDescript…
120 …void AddFullyConnected(const onnx::NodeProto& matmulNode, const onnx::NodeProto* addNode = nullptr…
[all …]
/external/pytorch/torch/onnx/_internal/
_exporter_legacy.py
32 from torch.onnx import errors
33 from torch.onnx._internal import io_adapter
34 from torch.onnx._internal.diagnostics import infra
35 from torch.onnx._internal.fx import (
43 # We can only import onnx from this module in a type-checking context to ensure that
44 # 'import torch.onnx' continues to work without having 'onnx' installed. We fully
45 # 'import onnx' inside of dynamo_export (by way of _assert_dependencies).
49 import onnx
55 from torch.onnx._internal.fx import diagnostics
58 """The default ONNX opset version the exporter will use if one is not specified explicitly
[all …]
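
The comment in the snippet above describes keeping onnx an optional dependency: it is imported only for type checking, and the real import is deferred until export actually runs. A minimal sketch of that pattern (the helper name is illustrative, not the module's own):

# Sketch of the optional-dependency pattern described above: import onnx only
# under TYPE_CHECKING and defer the real import until it is needed.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import onnx  # for type annotations only; not required at runtime

def _require_onnx():  # illustrative helper name
    try:
        import onnx  # real import happens here, at export time
    except ImportError as e:
        raise RuntimeError("Please install the 'onnx' package to export models.") from e
    return onnx
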
onnx_proto_utils.py
2 "Utilities for manipulating the onnx and onnx-script dependencies and ONNX proto."
16 from torch.onnx import _constants, _exporter_states, errors
17 from torch.onnx._internal import jit_utils, registration
23 """Export an ONNX model as a self contained ONNX test case.
30 \u2502 \u251c\u2500\u2500 model.onnx
38 model_bytes: The ONNX model in bytes.
46 import onnx
49 "Export test case to ONNX format failed: Please install ONNX."
56 os.path.join(test_case_dir, "model.onnx"),
65 proto = onnx.load_model_from_string(model_bytes) # type: ignore[attr-defined]
[all …]
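
The utility above packages a serialized model into a self-contained test-case directory containing model.onnx. A minimal sketch of the round trip it relies on, with illustrative names:

# Sketch: parse serialized ONNX bytes and write them back out as model.onnx,
# mirroring the test-case layout described above. Names are illustrative.
import os
import onnx

def save_test_case(model_bytes: bytes, test_case_dir: str) -> None:
    os.makedirs(test_case_dir, exist_ok=True)
    proto = onnx.load_model_from_string(model_bytes)  # bytes -> ModelProto
    onnx.save(proto, os.path.join(test_case_dir, "model.onnx"))
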
/external/pytorch/torch/csrc/jit/serialization/
export.cpp
18 #include <torch/csrc/jit/serialization/onnx.h>
19 #include <torch/csrc/onnx/back_compat.h>
20 #include <torch/csrc/onnx/onnx.h>
25 #include <onnx/checker.h>
27 #include <onnx/onnx_pb.h>
28 #include <onnx/proto_utils.h>
30 #include <onnx/shape_inference/implementation.h>
90 namespace onnx_torch = ::torch::onnx;
91 namespace onnx = ::ONNX_NAMESPACE;
96 // https://github.com/onnx/onnx/blob/master/onnx/helper.py.
[all …]
