Skip to content

Commit 4b19f3d

Browse files
committed
Fix test formatting
1 parent 56be3c2 commit 4b19f3d

File tree

1 file changed

+10
-21
lines changed

1 file changed

+10
-21
lines changed

test/unit/test_default_inference_handler.py

Lines changed: 10 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,8 @@
3131

3232

3333
class DummyModel(nn.Module):
34-
def __init__(self):
34+
35+
def __init__(self, ):
3536
super(DummyModel, self).__init__()
3637

3738
def forward(self, x):
@@ -58,11 +59,9 @@ def eia_inference_handler():
5859

5960

6061
def test_default_model_fn(inference_handler):
61-
with mock.patch(
62-
"sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os"
63-
) as mock_os:
62+
with mock.patch("sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os") as mock_os:
6463
mock_os.getenv.return_value = "true"
65-
mock_os.path.join.return_value = "model_dir"
64+
mock_os.path.join = os.path.join
6665
mock_os.path.exists.return_value = True
6766
with mock.patch("torch.jit.load") as mock_torch:
6867
mock_torch.return_value = DummyModel()
@@ -71,11 +70,9 @@ def test_default_model_fn(inference_handler):
7170

7271

7372
def test_default_model_fn_unknown_name(inference_handler):
74-
with mock.patch(
75-
"sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os"
76-
) as mock_os:
73+
with mock.patch("sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os") as mock_os:
7774
mock_os.getenv.return_value = "false"
78-
mock_os.path.join.return_value = "model_dir"
75+
mock_os.path.join = os.path.join
7976
mock_os.path.isfile.return_value = True
8077
mock_os.listdir.return_value = ["abcd.pt", "efgh.txt", "ijkl.bin"]
8178
with mock.patch("torch.jit.load") as mock_torch_load:
@@ -99,9 +96,7 @@ def test_default_model_fn_no_model_file(inference_handler, listdir_return_value)
9996
mock_os.path.splitext = os.path.splitext
10097
with mock.patch("torch.jit.load") as mock_torch_load:
10198
mock_torch_load.return_value = DummyModel()
102-
with pytest.raises(
103-
ValueError, match=r"Exactly one .pth or .pt file is required for PyTorch models: .*"
104-
):
99+
with pytest.raises(ValueError, match=r"Exactly one .pth or .pt file is required for PyTorch models: .*"):
105100
inference_handler.default_model_fn("model_dir")
106101

107102

@@ -231,9 +226,7 @@ def test_default_output_fn_gpu(inference_handler):
231226

232227

233228
def test_eia_default_model_fn(eia_inference_handler):
234-
with mock.patch(
235-
"sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os"
236-
) as mock_os:
229+
with mock.patch("sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os") as mock_os:
237230
mock_os.getenv.return_value = "true"
238231
mock_os.path.join.return_value = "model_dir"
239232
mock_os.path.exists.return_value = True
@@ -244,9 +237,7 @@ def test_eia_default_model_fn(eia_inference_handler):
244237

245238

246239
def test_eia_default_model_fn_error(eia_inference_handler):
247-
with mock.patch(
248-
"sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os"
249-
) as mock_os:
240+
with mock.patch("sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os") as mock_os:
250241
mock_os.getenv.return_value = "true"
251242
mock_os.path.join.return_value = "model_dir"
252243
mock_os.path.exists.return_value = False
@@ -256,9 +247,7 @@ def test_eia_default_model_fn_error(eia_inference_handler):
256247

257248
def test_eia_default_predict_fn(eia_inference_handler, tensor):
258249
model = DummyModel()
259-
with mock.patch(
260-
"sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os"
261-
) as mock_os:
250+
with mock.patch("sagemaker_pytorch_serving_container.default_pytorch_inference_handler.os") as mock_os:
262251
mock_os.getenv.return_value = "true"
263252
with mock.patch("torch.jit.optimized_execution") as mock_torch:
264253
mock_torch.__enter__.return_value = "dummy"

0 commit comments

Comments (0)