@@ -178,6 +178,47 @@ def test_xnnpack_qhardtanh(self):
         example_inputs = (torch.randn(1, 1, 1),)
         self.quantize_and_test_model(torch.nn.Hardtanh(), example_inputs)
 
+    def test_xnnpack_leaky_relu(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_out_of_place = torch.nn.LeakyReLU(negative_slope=0.2)
+
+            def forward(self, x):
+                return self.leaky_relu_out_of_place(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
+    def test_xnnpack_leaky_relu2(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_in_place = torch.nn.LeakyReLU(
+                    negative_slope=0.08, inplace=True
+                )
+
+            def forward(self, x):
+                return self.leaky_relu_in_place(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
+    def test_xnnpack_leaky_relu3(self):
+        example_inputs = (torch.randn(1, 3, 3),)
+
+        class LeakyReLUModule(torch.nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.leaky_relu_functional_default = torch.nn.functional.leaky_relu
+
+            def forward(self, x):
+                return self.leaky_relu_functional_default(x)
+
+        self.quantize_and_test_model(LeakyReLUModule(), example_inputs)
+
     def test_xnnpack_qlinear(self):
         in_size = 1
         input_size = 3
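For context: `quantize_and_test_model` is a helper defined elsewhere in this test class, not shown in this hunk. Below is a minimal sketch of what such a harness could look like, assuming the PT2E quantization flow (`prepare_pt2e`/`convert_pt2e` with `XNNPACKQuantizer`); the structure and tolerances are illustrative, not the file's actual implementation.

```python
# A minimal sketch of a quantize-and-test helper, assuming the PT2E flow;
# the real helper in this test file may differ.
import torch
from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
from torch.ao.quantization.quantizer.xnnpack_quantizer import (
    XNNPACKQuantizer,
    get_symmetric_quantization_config,
)


def quantize_and_test_model(model: torch.nn.Module, example_inputs: tuple) -> None:
    model.eval()
    # Clone inputs so an in-place activation in `model` cannot corrupt them.
    ref = model(*(t.clone() for t in example_inputs))

    # Export the eager module to an ATen-level graph.
    graph_module = torch.export.export(model, example_inputs).module()

    # Annotate the graph with XNNPACK's symmetric quantization config.
    quantizer = XNNPACKQuantizer()
    quantizer.set_global(get_symmetric_quantization_config())
    prepared = prepare_pt2e(graph_module, quantizer)

    prepared(*example_inputs)  # one calibration pass over the example inputs
    quantized = convert_pt2e(prepared)

    # The quantized graph should stay close to the float reference output.
    torch.testing.assert_close(
        quantized(*example_inputs), ref, atol=1e-1, rtol=1e-1
    )
```

The three new tests run this same helper against the module (out-of-place), in-place, and functional spellings of LeakyReLU, so the quantizer's pattern matching is exercised for each variant.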