From 83d2c9a23250ff12fb894d070aa0b420427244b8 Mon Sep 17 00:00:00 2001
From: Jerry Zhang
Date: Wed, 7 Oct 2020 10:24:57 -0700
Subject: [PATCH] [quant] Add quantized Sigmoid module (#45883)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/45883

Test Plan:
python test/test_quantization.py TestStaticQuantizedModule.test_sigmoid

Imported from OSS

Reviewed By: z-a-f

Differential Revision: D24129116

fbshipit-source-id: aa960549509c60374012f35b1f5be39e90418099
---
 test/quantization/test_quantized_module.py |  3 +++
 torch/nn/quantized/modules/__init__.py     |  3 ++-
 torch/nn/quantized/modules/activation.py   | 21 +++++++++++++++++++++
 3 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/test/quantization/test_quantized_module.py b/test/quantization/test_quantized_module.py
index f5c3a8e3e8d5..a1fbc308dfde 100644
--- a/test/quantization/test_quantized_module.py
+++ b/test/quantization/test_quantized_module.py
@@ -716,6 +716,9 @@ def test_elu(self):
     def test_leaky_relu(self):
         self._test_activation_module_impl("LeakyReLU", nn.LeakyReLU, nnq.LeakyReLU, {"negative_slope": 0.2})
 
+    def test_sigmoid(self):
+        self._test_activation_module_impl("Sigmoid", nn.Sigmoid, nnq.Sigmoid, {})
+
     @given(
         num_embeddings=st.integers(10, 50),
         embedding_dim=st.integers(5, 50).filter(lambda x: x % 4 == 0),
diff --git a/torch/nn/quantized/modules/__init__.py b/torch/nn/quantized/modules/__init__.py
index fe6a5f6c3765..72595eb3cea4 100644
--- a/torch/nn/quantized/modules/__init__.py
+++ b/torch/nn/quantized/modules/__init__.py
@@ -2,7 +2,7 @@
 import torch
 from torch.nn.modules.pooling import MaxPool2d
 
-from .activation import ReLU, ReLU6, Hardswish, ELU, LeakyReLU
+from .activation import ReLU, ReLU6, Hardswish, ELU, LeakyReLU, Sigmoid
 from .batchnorm import BatchNorm2d, BatchNorm3d
 from .normalization import LayerNorm, GroupNorm, InstanceNorm1d, \
     InstanceNorm2d, InstanceNorm3d
@@ -100,6 +100,7 @@ def from_float(mod):
     'Hardswish',
     'ELU',
     'LeakyReLU',
+    'Sigmoid',
     'LayerNorm',
     'GroupNorm',
     'InstanceNorm1d',
diff --git a/torch/nn/quantized/modules/activation.py b/torch/nn/quantized/modules/activation.py
index f2017c85f0fd..366e1e63a039 100644
--- a/torch/nn/quantized/modules/activation.py
+++ b/torch/nn/quantized/modules/activation.py
@@ -149,3 +149,24 @@ def _get_name(self):
     def from_float(cls, mod):
         scale, zero_point = mod.activation_post_process.calculate_qparams()
         return cls(float(scale), int(zero_point), mod.negative_slope, mod.inplace)
+
+class Sigmoid(torch.nn.Sigmoid):
+    r"""This is the quantized equivalent of :class:`~torch.nn.Sigmoid`.
+
+    Args:
+        output_scale: quantization scale of the output tensor
+        output_zero_point: quantization zero point of the output tensor
+    """
+
+    def __init__(self, output_scale: float, output_zero_point: int):
+        super().__init__()
+        self.output_scale = output_scale
+        self.output_zero_point = output_zero_point
+
+    def forward(self, input):
+        return torch.ops.quantized.sigmoid(input, self.output_scale, self.output_zero_point)
+
+    @classmethod
+    def from_float(cls, mod):
+        output_scale, output_zero_point = mod.activation_post_process.calculate_qparams()
+        return cls(float(output_scale), int(output_zero_point))
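
Usage sketch (editor's note, not part of the patch): the snippet below shows how the new
nnq.Sigmoid module could be exercised directly, assuming a PyTorch build that already
includes this change. The input qparams (scale=0.1, zero_point=128) are arbitrary
illustration values; output_scale=1/256 with output_zero_point=0 is simply a choice that
covers the [0, 1] output range of sigmoid for quint8.

    import torch
    import torch.nn.quantized as nnq

    # Quantize a float tensor to quint8 with illustrative qparams.
    x_fp32 = torch.randn(2, 4)
    x_q = torch.quantize_per_tensor(x_fp32, scale=0.1, zero_point=128, dtype=torch.quint8)

    # Sigmoid outputs lie in [0, 1]; scale=1/256, zero_point=0 covers that range for quint8.
    sigmoid_q = nnq.Sigmoid(output_scale=1.0 / 256.0, output_zero_point=0)
    y_q = sigmoid_q(x_q)

    # Compare against the float reference computed on the dequantized input.
    y_ref = torch.sigmoid(x_q.dequantize())
    print((y_q.dequantize() - y_ref).abs().max())

The same module is produced automatically by from_float during static quantization, with
output_scale and output_zero_point taken from the attached activation_post_process observer.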