1r"""Quantized Modules. 2 3Note:: 4 The `torch.nn.quantized` namespace is in the process of being deprecated. 5 Please, use `torch.ao.nn.quantized` instead. 6""" 7 8# The following imports are needed in case the user decides 9# to import the files directly, 10# s.a. `from torch.nn.quantized.modules.conv import ...`. 11# No need to add them to the `__all__`. 12from torch.ao.nn.quantized.modules import ( 13 activation, 14 batchnorm, 15 conv, 16 DeQuantize, 17 dropout, 18 embedding_ops, 19 functional_modules, 20 linear, 21 MaxPool2d, 22 normalization, 23 Quantize, 24 rnn, 25 utils, 26) 27from torch.ao.nn.quantized.modules.activation import ( 28 ELU, 29 Hardswish, 30 LeakyReLU, 31 MultiheadAttention, 32 PReLU, 33 ReLU6, 34 Sigmoid, 35 Softmax, 36) 37from torch.ao.nn.quantized.modules.batchnorm import BatchNorm2d, BatchNorm3d 38from torch.ao.nn.quantized.modules.conv import ( 39 Conv1d, 40 Conv2d, 41 Conv3d, 42 ConvTranspose1d, 43 ConvTranspose2d, 44 ConvTranspose3d, 45) 46from torch.ao.nn.quantized.modules.dropout import Dropout 47from torch.ao.nn.quantized.modules.embedding_ops import Embedding, EmbeddingBag 48from torch.ao.nn.quantized.modules.functional_modules import ( 49 FloatFunctional, 50 FXFloatFunctional, 51 QFunctional, 52) 53from torch.ao.nn.quantized.modules.linear import Linear 54from torch.ao.nn.quantized.modules.normalization import ( 55 GroupNorm, 56 InstanceNorm1d, 57 InstanceNorm2d, 58 InstanceNorm3d, 59 LayerNorm, 60) 61from torch.ao.nn.quantized.modules.rnn import LSTM 62 63 64__all__ = [ 65 "BatchNorm2d", 66 "BatchNorm3d", 67 "Conv1d", 68 "Conv2d", 69 "Conv3d", 70 "ConvTranspose1d", 71 "ConvTranspose2d", 72 "ConvTranspose3d", 73 "DeQuantize", 74 "ELU", 75 "Embedding", 76 "EmbeddingBag", 77 "GroupNorm", 78 "Hardswish", 79 "InstanceNorm1d", 80 "InstanceNorm2d", 81 "InstanceNorm3d", 82 "LayerNorm", 83 "LeakyReLU", 84 "Linear", 85 "LSTM", 86 "MultiheadAttention", 87 "Quantize", 88 "ReLU6", 89 "Sigmoid", 90 "Softmax", 91 "Dropout", 92 "PReLU", 93 # Wrapper modules 94 "FloatFunctional", 95 "FXFloatFunctional", 96 "QFunctional", 97] 98