Home
last modified time | relevance | path

Searched refs:allow_in_graph (Results 1 – 16 of 16) sorted by relevance

/external/pytorch/torch/_dynamo/
Ddecorators.py77 def allow_in_graph(fn): function
87 return [allow_in_graph(x) for x in fn]
327 allow_in_graph(einops.rearrange)
328 allow_in_graph(einops.reduce)
330 allow_in_graph(einops.repeat) # available since einops 0.2.0
332 allow_in_graph(einops.einsum) # available since einops 0.5.0
334 allow_in_graph(einops.pack) # available since einops 0.6.0
336 allow_in_graph(einops.unpack) # available since einops 0.6.0
D__init__.py8 allow_in_graph,
/external/pytorch/docs/source/
Dtorch.compiler_fine_grain_apis.rst27 "``allow_in_graph``", "The annotated callable goes as is in the TorchDynamo graph. For example, a bla…
75 different backend compilers, you might have to call ``allow_in_graph`` for
78 ``torch.compiler.allow_in_graph``
81 ``torch.compiler.allow_in_graph`` is useful when the relevant function frame
85 function is decorated with ``allow_in_graph``, TorchDynamo treats it as a
89 ``allow_in_graph`` skips TorchDynamo completely on the decorated function
91 closures, and others. Use `allow_in_graph` with caution. PyTorch downstream
93 features, but ``allow_in_graph`` bypasses TorchDynamo. Using ``allow_in_graph``
Dtorch.compiler_api.rst18 allow_in_graph
Dtorch.compiler_faq.rst372 For other transforms, as a workaround, use ``torch._dynamo.allow_in_graph``
374 ``allow_in_graph`` is an escape hatch. If your code does not work with
377 ``allow_in_graph``.
379 By using ``allow_in_graph`` to annotate a function, you must make sure
396 return torch._dynamo.allow_in_graph(torch.vmap(torch.sum))(x)
401 A common pitfall is using ``allow_in_graph`` to annotate a function that
/external/pytorch/torch/nn/attention/
Dbias.py27 torch._dynamo.allow_in_graph(is_flash_attention_available)
28 torch._dynamo.allow_in_graph(can_use_flash_attention)
29 torch._dynamo.allow_in_graph(can_use_efficient_attention)
30 torch._dynamo.allow_in_graph(SDPAParams)
/external/pytorch/torch/compiler/
D__init__.py43 def allow_in_graph(fn): function
120 return torch._dynamo.allow_in_graph(fn)
/external/pytorch/test/dynamo/
Dtest_interop.py35 from torch._dynamo import allow_in_graph
38 f = allow_in_graph(f)
Dtest_sdpa.py15 SDPAParams = torch._dynamo.allow_in_graph(SDPAParams)
Dtest_aot_autograd_cache.py691 @torch._dynamo.allow_in_graph
710 @torch._dynamo.allow_in_graph
753 @torch._dynamo.allow_in_graph
Dtest_decorators.py34 torch._dynamo.allow_in_graph(torch.sub)
199 torch._dynamo.allow_in_graph(my_custom_function)
Dtest_autograd_function.py334 @torch._dynamo.allow_in_graph
928 torch._dynamo.allow_in_graph(FooTensor)
Dtest_misc.py39 from torch._dynamo import allow_in_graph
7258 @allow_in_graph
7379 @torch._dynamo.allow_in_graph
7413 @torch._dynamo.allow_in_graph
Dtest_repros.py4218 @torch._dynamo.allow_in_graph
5502 @torch._dynamo.allow_in_graph
/external/pytorch/torch/sparse/
Dsemi_structured.py131 torch._dynamo.allow_in_graph(cls)
/external/pytorch/test/functorch/
Dtest_eager_transforms.py43 from torch._dynamo import allow_in_graph
5132 f = allow_in_graph(f)