
Searched full:aten (Results 1 – 25 of 4368) sorted by relevance


/external/pytorch/test/expect/
HasDecompTest.test_has_decomposition.expect
1 aten::__lshift__.Scalar
2 aten::__lshift__.Scalar_out
3 aten::__lshift__.Tensor
4 aten::__lshift__.Tensor_out
5 aten::__rshift__.Scalar
6 aten::__rshift__.Scalar_out
7 aten::__rshift__.Tensor
8 aten::__rshift__.Tensor_out
9 aten::_adaptive_avg_pool2d_backward
10 aten::_adaptive_avg_pool2d_backward.out
[all …]
HasDecompTest.test_aten_core_operators.expect
1 aten::__ilshift__.Scalar
2 aten::__ilshift__.Tensor
3 aten::__irshift__.Scalar
4 aten::__irshift__.Tensor
5 aten::_adaptive_avg_pool2d
6 aten::_adaptive_avg_pool2d.out
7 aten::_addmm_activation
8 aten::_addmm_activation.out
9 aten::_batch_norm_no_update
10 aten::_batch_norm_with_update
[all …]
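
A hedged aside, not part of the search results: the expect files above pin down decomposition coverage per ATen overload. Assuming the private torch._decomp module (where PyTorch keeps its global registry), coverage for any single overload can be checked directly; the op below is just one picked from the listing.

import torch
from torch._decomp import decomposition_table  # private API, may change between releases

aten = torch.ops.aten

# Keys of the table are OpOverload objects; membership tells you whether a
# Python decomposition is registered for that overload.
print(aten._adaptive_avg_pool2d.default in decomposition_table)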
/external/pytorch/test/mobile/model_test/
coverage.yaml
7 - aten::Bool.Tensor
8 - aten::Bool.int
9 - aten::Float.Scalar
10 - aten::Float.Tensor
11 - aten::Float.str
12 - aten::FloatImplicit
13 - aten::Int.Scalar
14 - aten::Int.Tensor
15 - aten::Int.float
16 - aten::Int.str
[all …]
model_ops.yaml
2 aten::Bool.Tensor: 32
3 aten::Bool.int: 34
4 aten::Float.Scalar: 30
5 aten::Float.Tensor: 20
6 aten::Float.str: 32
7 aten::FloatImplicit: 5
8 aten::Int.Scalar: 60
9 aten::Int.Tensor: 66
10 aten::Int.float: 11
11 aten::Int.str: 20
[all …]
/external/pytorch/
pt_ops.bzl
171 "aten::str",
172 "aten::list",
173 "aten::__range_length",
174 "aten::__derive_index",
177 "aten::IntImplicit",
178 "aten::FloatImplicit",
179 "aten::ScalarImplicit",
180 "aten::Bool.Tensor",
181 "aten::Bool.int",
182 "aten::Bool.float",
[all …]
build_variables.bzl
37 …hub.com/pytorch/pytorch/blob/f99a693cd9ff7a9b5fdc71357dac66b8192786d3/aten/src/ATen/core/CMakeList…
969 # List of non-globed source used to build ATen core internally
971 "aten/src/ATen/detail/CUDAHooksInterface.cpp",
972 "aten/src/ATen/detail/HIPHooksInterface.cpp",
973 "aten/src/ATen/detail/MPSHooksInterface.cpp",
974 "aten/src/ATen/detail/MAIAHooksInterface.cpp",
975 "aten/src/ATen/detail/PrivateUse1HooksInterface.cpp",
976 "aten/src/ATen/detail/XPUHooksInterface.cpp",
977 "aten/src/ATen/detail/MTIAHooksInterface.cpp",
978 "aten/src/ATen/detail/IPUHooksInterface.cpp",
[all …]
BUILD.bazel
9 load("@pytorch//:aten.bzl", "generate_aten", "intern_build_aten_ops")
37 …ten_generation_srcs = ["aten/src/ATen/native/native_functions.yaml"] + ["aten/src/ATen/native/tags…
40 "aten/src/ATen/RegisterBackendSelect.cpp",
41 "aten/src/ATen/RegisterCPU.cpp",
42 "aten/src/ATen/RegisterFunctionalization_0.cpp",
43 "aten/src/ATen/RegisterFunctionalization_1.cpp",
44 "aten/src/ATen/RegisterFunctionalization_2.cpp",
45 "aten/src/ATen/RegisterFunctionalization_3.cpp",
46 # "aten/src/ATen/RegisterFunctionalizationEverything.cpp",
47 "aten/src/ATen/RegisterMkldnnCPU.cpp",
[all …]
/external/pytorch/torch/distributed/tensor/_ops/
_pointwise_ops.py
32 aten = torch.ops.aten
51 aten.div.Scalar, # this op is linear on the first argument, and the second argument is scalar, so…
52 aten.div_.Scalar, # this op is linear on the first argument, and the second argument is scalar, s…
53 aten.to.dtype,
54 aten.add.Tensor,
55 aten.add_.Tensor,
61 aten.__ilshift__.Scalar,
62 aten.__ilshift__.Tensor,
63 aten.__irshift__.Scalar,
64 aten.__irshift__.Tensor,
[all …]
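
A short illustration of what the aten = torch.ops.aten alias in _pointwise_ops.py resolves to (a sketch, not code from the file): each entry such as aten.add.Tensor is an overload object that can be called directly and dispatches to the same kernel as the public API.

import torch

aten = torch.ops.aten  # same alias the excerpt above defines

x = torch.randn(4)
y = torch.randn(4)

# aten.add.Tensor names one specific overload of the add operator.
print(aten.add.Tensor(x, y).equal(torch.add(x, y)))  # True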
/external/pytorch/test/functorch/
test_vmap_registrations.py
18 "aten::is_nonzero",
19 "aten::item",
20 "aten::linalg_slogdet",
21 "aten::masked_select_backward",
22 "aten::one_hot",
23 "aten::silu_backward",
24 "aten::where",
28 "aten::alias_copy",
29 "aten::as_strided_copy",
30 "aten::diagonal_copy",
[all …]
/external/pytorch/test/export/
testing.py
8 aten = torch.ops.aten
12 aten.arctan2.default,
13 aten.divide.Tensor,
14 aten.divide.Scalar,
15 aten.divide.Tensor_mode,
16 aten.divide.Scalar_mode,
17 aten.multiply.Tensor,
18 aten.multiply.Scalar,
19 aten.subtract.Tensor,
20 aten.subtract.Scalar,
[all …]
/external/pytorch/torchgen/aoti/
fallback_ops.py
11 "aten._adaptive_avg_pool2d_backward.default",
12 "aten._adaptive_avg_pool2d.default",
13 "aten._adaptive_avg_pool3d.default",
14 "aten._adaptive_avg_pool3d_backward.default",
15 "aten.adaptive_max_pool2d_backward.default",
16 "aten.adaptive_max_pool2d.default",
17 "aten.adaptive_max_pool3d.default",
18 "aten.adaptive_max_pool3d_backward.default",
19 "aten.addbmm.default",
20 "aten._addmm_activation.default",
[all …]
/external/pytorch/torch/csrc/jit/passes/
normalize_ops.cpp
25 "aten::rsub.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor")) { in normalizeRSub()
27 Node* newSub = iter->replaceWithNewSymbol(aten::sub); in normalizeRSub()
42 if (iter->kind() == aten::__is__) { in normalizeIsBool()
43 iter->replaceWithNewSymbol(aten::eq); in normalizeIsBool()
47 if (iter->kind() == aten::__isnot__) { in normalizeIsBool()
48 iter->replaceWithNewSymbol(aten::ne); in normalizeIsBool()
84 {aten::absolute, aten::abs}, in getOperatorAliasMap()
85 {aten::absolute_, aten::abs_}, in getOperatorAliasMap()
86 {aten::clip, aten::clamp}, in getOperatorAliasMap()
87 {aten::clip_, aten::clamp_}, in getOperatorAliasMap()
[all …]
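
The alias map in normalize_ops.cpp folds alias ops back to their canonical forms. The same aliases are visible from Python; a minimal sketch using two pairs taken from the excerpt above:

import torch

t = torch.tensor([-1.5, 2.0])
# absolute is an alias of abs, clip of clamp.
print(torch.absolute(t).equal(torch.abs(t)))                      # True
print(torch.clip(t, -1.0, 1.0).equal(torch.clamp(t, -1.0, 1.0)))  # True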
/external/pytorch/torch/_decomp/
__init__.py
63 # to filter those out, e.g aten.add.float_int
157 @register_decomposition(torch.ops.aten.clamp_min)
253 # See NOTE [Core ATen Ops]
257 # Resulting opset of decomposition is core aten ops
259 aten = torch.ops.aten
262 aten.addcdiv,
263 aten.addcdiv_,
264 aten.addcmul,
265 aten.addcmul_,
266 aten.addr,
[all …]
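
A hedged sketch of how registrations like the ones above are consumed (assuming the private torch._decomp.get_decompositions helper): given a list of ops from the excerpt, it returns the per-overload decomposition table for them.

import torch
from torch._decomp import get_decompositions  # private API, may change between releases

aten = torch.ops.aten

# Fetch the registered decompositions for two ops named in the excerpt and
# list which overloads they cover.
table = get_decompositions([aten.addcdiv, aten.addcmul])
print(list(table))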
/external/pytorch/torch/_inductor/fx_passes/serialized_patterns/
_sfdp_pattern_16.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
36 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
37 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
38 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
39 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
40 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
41 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
[all …]
_sfdp_pattern_18.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
36 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
37 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
38 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
39 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored(), _users=2)
40 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
41 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
[all …]
_sfdp_pattern_14.py
11 aten = torch.ops.aten
33 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
34 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
35 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
36 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
37 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
38 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
39 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
40 clone_default_1 = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous…
41 view_default_1 = CallFunction(aten.view.default, clone_default_1, Ignored(), _users=2)
[all …]
_sfdp_pattern_17.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 eq_Scalar = CallFunction(aten.eq.Scalar, KeywordArg('attn_mask'), Ignored())
36 expand_default = CallFunction(aten.expand.default, eq_Scalar, Ignored(), _users=2)
37 full_default = CallFunction(aten.full.default, [], Ignored(), dtype=Ignored(), device=Ignored(), pi…
38 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
39 expand_default_1 = CallFunction(aten.expand.default, permute_default, Ignored())
40 clone_default = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous_f…
41 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
[all …]
_sfdp_pattern_12.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
36 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
37 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
38 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
39 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
40 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
41 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
[all …]
_sfdp_pattern_9.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
36 div_Tensor = CallFunction(aten.div.Tensor, permute_default, Ignored())
37 expand_default = CallFunction(aten.expand.default, div_Tensor, Ignored())
38 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
39 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
40 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
41 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
[all …]
_sfdp_pattern_7.py
11 aten = torch.ops.aten
33 rand_default = CallFunction(aten.rand.default, Ignored(), dtype=Ignored(), device=Ignored(), pin_me…
34 gt_Scalar = CallFunction(aten.gt.Scalar, rand_default, KeywordArg('dropout_p'), _users=2)
35 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
36 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
37 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
38 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
39 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
40 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
41 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
[all …]
_sfdp_pattern_15.py
11 aten = torch.ops.aten
33 eq_Scalar = CallFunction(aten.eq.Scalar, KeywordArg('attn_mask'), Ignored())
34 expand_default = CallFunction(aten.expand.default, eq_Scalar, Ignored(), _users=2)
35 full_default = CallFunction(aten.full.default, [], Ignored(), dtype=Ignored(), device=Ignored(), pi…
36 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
37 expand_default_1 = CallFunction(aten.expand.default, permute_default, Ignored())
38 clone_default = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous_f…
39 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
40 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
41 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
[all …]
_sfdp_pattern_11.py
11 aten = torch.ops.aten
33 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
34 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
35 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
36 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
37 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
38 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
39 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
40 clone_default_1 = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous…
41 view_default_1 = CallFunction(aten.view.default, clone_default_1, Ignored(), _users=2)
[all …]
_sfdp_pattern_10.py
11 aten = torch.ops.aten
33 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
34 div_Tensor = CallFunction(aten.div.Tensor, permute_default, Ignored())
35 expand_default = CallFunction(aten.expand.default, div_Tensor, Ignored())
36 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
37 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
38 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
39 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
40 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
41 clone_default_1 = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous…
[all …]
_sfdp_pattern_8.py
11 aten = torch.ops.aten
33 permute_default = CallFunction(aten.permute.default, KeywordArg('query'), Ignored())
34 expand_default = CallFunction(aten.expand.default, permute_default, Ignored())
35 clone_default = CallFunction(aten.clone.default, expand_default, memory_format=torch.contiguous_for…
36 view_default = CallFunction(aten.view.default, clone_default, Ignored(), _users=2)
37 permute_default_1 = CallFunction(aten.permute.default, KeywordArg('key'), Ignored())
38 permute_default_2 = CallFunction(aten.permute.default, permute_default_1, Ignored())
39 expand_default_1 = CallFunction(aten.expand.default, permute_default_2, Ignored())
40 clone_default_1 = CallFunction(aten.clone.default, expand_default_1, memory_format=torch.contiguous…
41 view_default_1 = CallFunction(aten.view.default, clone_default_1, Ignored(), _users=2)
[all …]
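
The serialized _sfdp_pattern_* files above are generated against PyTorch's private pattern-matcher API. A minimal sketch (assuming torch._inductor.pattern_matcher, which those files import from) of the building blocks they use:

import torch
from torch._inductor.pattern_matcher import CallFunction, Ignored, KeywordArg

aten = torch.ops.aten

# A pattern node matching aten.permute.default applied to a captured 'query'
# argument, with the permutation order left unconstrained.
permute_pattern = CallFunction(aten.permute.default, KeywordArg("query"), Ignored())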
/external/executorch/kernels/test/
targets.bzl
7 For ATen kernel testing, let's use portable functions.yaml for tested ops.
170 codegen_function_header_wrapper("executorch/kernels/aten", "aten")
176 _common_op_test("op__to_dim_order_copy_test", ["aten", "portable"])
177 _common_op_test("op_abs_test", ["aten", "portable"])
178 _common_op_test("op_acos_test", ["aten", "portable"])
179 _common_op_test("op_acosh_test", ["aten", "portable"])
180 _common_op_test("op_add_test", ["aten", "portable", "optimized"])
181 _common_op_test("op_addmm_test", ["aten", "portable"])
182 _common_op_test("op_alias_copy_test", ["aten", "portable"])
183 _common_op_test("op_amax_test", ["aten", "portable"])
[all …]
