#pragma once
// ${generated_comment}

// NB: The implementing C++ file is RegisterDispatchKey.cpp

// The only #includes we need are for custom classes that have defaults in the C++ API
#include <c10/core/MemoryFormat.h>
#include <c10/core/Scalar.h>
#include <ATen/core/Reduction.h>

#if defined(AT_PER_OPERATOR_HEADERS) && defined(TORCH_ASSERT_ONLY_METHOD_OPERATORS)
#error This change adds a dependency on all pytorch operators, meaning the     \
  file will need to be re-compiled every time an operator is changed or added. \
  Consider including a specific operator from                                  \
  <ATen/ops/{my_operator}_${dispatch_namespace}_dispatch.h>.                   \
  See NOTE [TORCH_ASSERT_ONLY_METHOD_OPERATORS].
#endif
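
// Illustrative sketch of the alternative suggested above (not part of the generated
// output): a translation unit that only needs the `add` operator for this dispatch
// key could include
//   #include <ATen/ops/add_${dispatch_namespace}_dispatch.h>
// instead of this umbrella header; `add` here is just a stand-in for whichever
// operator the file actually uses.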

${DispatchKeyFunctions_inl_includes}


${dispatch_namespaced_declarations}
