From e276b24cd7fc7ce13f5086a5eb3a507b973919e4 Mon Sep 17 00:00:00 2001
From: Digant Desai
Date: Tue, 27 Jun 2023 17:10:29 -0700
Subject: [PATCH] Add support for quantized LeakyReLU

Summary: Also adds support for backend_config

Reviewed By: mcr229

Differential Revision: D47043207

fbshipit-source-id: 509bd4c02eb7ff5d3d47762522debd827bee7240
---
 .../xnnpack/partition/xnnpack_partitioner.py |  3 ++
 .../xnnpack/test/test_xnnpack_quantized.py   | 41 +++++++++++++++++++
 2 files changed, 44 insertions(+)

diff --git a/backends/xnnpack/partition/xnnpack_partitioner.py b/backends/xnnpack/partition/xnnpack_partitioner.py
index ddcf44b3c90..fa58a960871 100644
--- a/backends/xnnpack/partition/xnnpack_partitioner.py
+++ b/backends/xnnpack/partition/xnnpack_partitioner.py
@@ -553,6 +553,9 @@ def __init__(self):
             torch.nn.ReLU,
             torch.nn.functional.relu,
             torch.nn.functional.relu_,
+            torch.nn.functional.leaky_relu,
+            torch.nn.functional.leaky_relu_,
+            torch.nn.LeakyReLU,
         ]
 
         # Modules which support dynamic quantization
diff --git a/backends/xnnpack/test/test_xnnpack_quantized.py b/backends/xnnpack/test/test_xnnpack_quantized.py
index 09a53bf22e4..9edc94438d8 100644
--- a/backends/xnnpack/test/test_xnnpack_quantized.py
+++ b/backends/xnnpack/test/test_xnnpack_quantized.py
@@ -178,6 +178,47 @@ def test_xnnpack_qhardtanh(self):
         example_inputs = (torch.randn(1, 1, 1),)
         self.quantize_and_test_model(torch.nn.Hardtanh(), example_inputs)
 
+    def test_xnnpack_leaky_relu(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_out_of_place = torch.nn.LeakyReLU(negative_slope=0.2)
+
+            def forward(self, x):
+                return self.leaky_relu_out_of_place(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
+    def test_xnnpack_leaky_relu2(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_in_place = torch.nn.LeakyReLU(
+                    negative_slope=0.08, inplace=True
+                )
+
+            def forward(self, x):
+                return self.leaky_relu_in_place(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
+    def test_xnnpack_leaky_relu3(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_functional_default = torch.nn.functional.leaky_relu
+
+            def forward(self, x):
+                return self.leaky_relu_functional_default(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
     def test_xnnpack_qlinear(self):
         in_size = 1
         input_size = 3