Lines Matching full:onnx
1 #include <onnx/onnx_pb.h>
2 #include <torch/csrc/onnx/back_compat.h>
3 #include <torch/csrc/onnx/init.h>
4 #include <torch/csrc/onnx/onnx.h>
8 #include <torch/csrc/jit/passes/onnx.h>
9 #include <torch/csrc/jit/passes/onnx/cast_all_constant_to_floating.h>
10 #include <torch/csrc/jit/passes/onnx/constant_fold.h>
11 #include <torch/csrc/jit/passes/onnx/deduplicate_initializers.h>
12 #include <torch/csrc/jit/passes/onnx/eliminate_unused_items.h>
13 #include <torch/csrc/jit/passes/onnx/eval_peephole.h>
14 #include <torch/csrc/jit/passes/onnx/fixup_onnx_controlflow.h>
15 #include <torch/csrc/jit/passes/onnx/function_extraction.h>
16 #include <torch/csrc/jit/passes/onnx/function_substitution.h>
17 #include <torch/csrc/jit/passes/onnx/list_model_parameters.h>
18 #include <torch/csrc/jit/passes/onnx/naming.h>
19 #include <torch/csrc/jit/passes/onnx/onnx_log.h>
20 #include <torch/csrc/jit/passes/onnx/pattern_conversion/autograd_function_process.h>
21 #include <torch/csrc/jit/passes/onnx/pattern_conversion/pattern_conversion.h>
22 #include <torch/csrc/jit/passes/onnx/pattern_conversion/pattern_encapsulation.h>
23 #include <torch/csrc/jit/passes/onnx/peephole.h>
24 #include <torch/csrc/jit/passes/onnx/prepare_division_for_onnx.h>
25 #include <torch/csrc/jit/passes/onnx/preprocess_for_onnx.h>
26 #include <torch/csrc/jit/passes/onnx/remove_inplace_ops_for_onnx.h>
27 #include <torch/csrc/jit/passes/onnx/scalar_type_analysis.h>
28 #include <torch/csrc/jit/passes/onnx/shape_type_inference.h>
29 #include <torch/csrc/jit/passes/onnx/unpack_quantized_weights.h>
32 namespace torch::onnx {
39 // ONNX specific passes in initONNXBindings()
152 torch::jit::onnx::ONNXFunctionExtraction)) in initONNXBindings()
197 &torch::jit::onnx::ONNXClearScopeRecords) in initONNXBindings()
200 &torch::jit::onnx::ONNXTrackScopeAttributes) in initONNXBindings()
203 ::torch::jit::onnx::is_log_enabled, in initONNXBindings()
204 "Returns whether ONNX logging is enabled or disabled.") in initONNXBindings()
207 ::torch::jit::onnx::set_log_enabled, in initONNXBindings()
208 "Enables or disables ONNX logging.") in initONNXBindings()
223 ::torch::jit::onnx::set_log_output_stream(out); in initONNXBindings()
225 "Set specific file stream for ONNX logging.") in initONNXBindings()
229 if (::torch::jit::onnx::is_log_enabled()) { in initONNXBindings()
230 auto& out = ::torch::jit::onnx::_get_log_output_stream(); in initONNXBindings()
237 "Write `args` to the previously specified ONNX log stream.") in initONNXBindings()
241 ::torch::jit::onnx::AssignScopedNamesForNodeAndValue), in initONNXBindings()
246 ::torch::jit::onnx::ONNXScopeName::createFullScopeName), in initONNXBindings()
256 auto onnx = m.def_submodule("_onnx"); in initONNXBindings() local
257 py::enum_<::ONNX_NAMESPACE::TensorProto_DataType>(onnx, "TensorProtoDataType") in initONNXBindings()
275 .value("FLOAT8E4M3FN", ::torch::onnx::TensorProto_DataType_FLOAT8E4M3FN) in initONNXBindings()
277 "FLOAT8E4M3FNUZ", ::torch::onnx::TensorProto_DataType_FLOAT8E4M3FNUZ) in initONNXBindings()
278 .value("FLOAT8E5M2", ::torch::onnx::TensorProto_DataType_FLOAT8E5M2) in initONNXBindings()
280 "FLOAT8E5M2FNUZ", ::torch::onnx::TensorProto_DataType_FLOAT8E5M2FNUZ); in initONNXBindings()
282 py::enum_<OperatorExportTypes>(onnx, "OperatorExportTypes") in initONNXBindings()
283 .value("ONNX", OperatorExportTypes::ONNX) in initONNXBindings()
288 py::enum_<TrainingMode>(onnx, "TrainingMode") in initONNXBindings()
293 onnx.attr("PRODUCER_VERSION") = py::str(TORCH_VERSION); in initONNXBindings()
295 } // namespace torch::onnx
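
For context, the matches above come from pybind11 binding code: a `_onnx` submodule is created, enums and attributes are attached to it, and logging toggles are exposed. Below is a minimal, self-contained sketch of that general pattern, assuming nothing beyond the standard pybind11 API. The names used here (`example::TrainingMode`, `initExampleOnnxBindings`, `log_enabled`, the placeholder version string) are illustrative stand-ins, not the actual `torch::onnx` symbols.

// Illustrative sketch only -- a simplified stand-in for the binding pattern
// visible in the matched lines (submodule "_onnx", enum bindings, logging
// toggles, string attribute). All names are placeholders.
#include <pybind11/pybind11.h>

namespace py = pybind11;

namespace example {

enum class TrainingMode { EVAL, PRESERVE, TRAINING }; // placeholder enum

inline bool log_enabled = false; // placeholder logging flag

void initExampleOnnxBindings(py::module_& m) {
  // Analogous to `m.def_submodule("_onnx")` on matched line 256.
  auto onnx = m.def_submodule("_onnx");

  // Enum exposed to Python, reachable as e.g. module._onnx.TrainingMode.EVAL,
  // analogous to the TrainingMode binding on matched line 288.
  py::enum_<TrainingMode>(onnx, "TrainingMode")
      .value("EVAL", TrainingMode::EVAL)
      .value("PRESERVE", TrainingMode::PRESERVE)
      .value("TRAINING", TrainingMode::TRAINING);

  // Logging toggles, analogous to the is_log_enabled/set_log_enabled
  // bindings on matched lines 203-208.
  onnx.def(
      "_is_log_enabled",
      []() { return log_enabled; },
      "Returns whether logging is enabled.");
  onnx.def(
      "_set_log_enabled",
      [](bool enabled) { log_enabled = enabled; },
      "Enables or disables logging.");

  // String attribute, analogous to PRODUCER_VERSION on matched line 293.
  onnx.attr("PRODUCER_VERSION") = py::str("0.0.0-example");
}

} // namespace example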