From 56f5611493b12397cf84facd32e5e036e1fada2a Mon Sep 17 00:00:00 2001
From: ydcjeff
Date: Thu, 8 Apr 2021 20:18:59 +0630
Subject: [PATCH 1/2] fix(templates): merge handlers.py into utils.py

---
 templates/gan/handlers.py                  | 1 -
 templates/gan/test_all.py                  | 3 +--
 templates/gan/utils.py                     | 2 ++
 templates/image_classification/handlers.py | 1 -
 templates/image_classification/test_all.py | 3 +--
 templates/image_classification/utils.py    | 2 ++
 templates/single/handlers.py               | 1 -
 templates/single/test_all.py               | 3 +--
 templates/single/utils.py                  | 1 +
 9 files changed, 8 insertions(+), 9 deletions(-)
 delete mode 100644 templates/gan/handlers.py
 delete mode 100644 templates/image_classification/handlers.py
 delete mode 100644 templates/single/handlers.py

diff --git a/templates/gan/handlers.py b/templates/gan/handlers.py
deleted file mode 100644
index b4cd4648..00000000
--- a/templates/gan/handlers.py
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "_handlers.py" %}
diff --git a/templates/gan/test_all.py b/templates/gan/test_all.py
index 4ca6105f..aa3c3b41 100644
--- a/templates/gan/test_all.py
+++ b/templates/gan/test_all.py
@@ -8,7 +8,6 @@
 import torch
 from config import get_default_parser
 from datasets import get_datasets
-from handlers import get_handlers, get_logger
 from ignite.contrib.handlers import (
     ClearMLLogger,
     MLflowLogger,
@@ -28,7 +27,7 @@
 from torch import nn, optim
 from torch.utils.data import Dataset
 from trainers import create_trainers, train_function
-from utils import hash_checkpoint, log_metrics, resume_from, setup_logging
+from utils import hash_checkpoint, log_metrics, resume_from, setup_logging, get_handlers, get_logger
 
 
 class TestDataset(unittest.TestCase):
diff --git a/templates/gan/utils.py b/templates/gan/utils.py
index 1851749a..4b232ecb 100644
--- a/templates/gan/utils.py
+++ b/templates/gan/utils.py
@@ -21,6 +21,8 @@
 from torch.optim.optimizer import Optimizer
 
 from models import Generator, Discriminator
 
+{% includes "_handlers.py" %}
+
 # we can use `idist.auto_model` to handle distributed configurations
 # for your model : https://pytorch.org/ignite/distributed.html#ignite.distributed.auto.auto_model
diff --git a/templates/image_classification/handlers.py b/templates/image_classification/handlers.py
deleted file mode 100644
index b4cd4648..00000000
--- a/templates/image_classification/handlers.py
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "_handlers.py" %}
diff --git a/templates/image_classification/test_all.py b/templates/image_classification/test_all.py
index 5c3e628f..68028e81 100644
--- a/templates/image_classification/test_all.py
+++ b/templates/image_classification/test_all.py
@@ -9,7 +9,6 @@
 import torch
 from config import get_default_parser
 from datasets import get_datasets
-from handlers import get_handlers, get_logger
 from ignite.contrib.handlers import (
     ClearMLLogger,
     MLflowLogger,
@@ -36,7 +35,7 @@
     train_events_to_attr,
     train_function,
 )
-from utils import hash_checkpoint, initialize, log_metrics, resume_from, setup_logging
+from utils import hash_checkpoint, initialize, log_metrics, resume_from, setup_logging, get_handlers, get_logger
 
 
 class TestDataset(unittest.TestCase):
diff --git a/templates/image_classification/utils.py b/templates/image_classification/utils.py
index 520c5c08..eba3ff89 100644
--- a/templates/image_classification/utils.py
+++ b/templates/image_classification/utils.py
@@ -21,6 +21,8 @@
 from torch.optim import Optimizer, SGD
 
 from models import get_model
 
+{% includes "_handlers.py" %}
+
 
 def initialize(config: Optional[Any]) -> Tuple[Module, Optimizer, Module, Union[_LRScheduler, ParamScheduler]]:
diff --git a/templates/single/handlers.py b/templates/single/handlers.py
deleted file mode 100644
index b4cd4648..00000000
--- a/templates/single/handlers.py
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "_handlers.py" %}
diff --git a/templates/single/test_all.py b/templates/single/test_all.py
index c9fd82c7..1b81e762 100644
--- a/templates/single/test_all.py
+++ b/templates/single/test_all.py
@@ -26,7 +26,6 @@
 from torch import nn, optim
 
 from config import get_default_parser
-from handlers import get_handlers, get_logger
 from trainers import (
     TrainEvents,
     create_trainers,
@@ -34,7 +33,7 @@
     train_events_to_attr,
     train_function,
 )
-from utils import hash_checkpoint, log_metrics, resume_from, setup_logging
+from utils import hash_checkpoint, log_metrics, resume_from, setup_logging, get_handlers, get_logger
 
 
 class TestHandlers(unittest.TestCase):
diff --git a/templates/single/utils.py b/templates/single/utils.py
index 8c5281dc..7dab530f 100644
--- a/templates/single/utils.py
+++ b/templates/single/utils.py
@@ -18,6 +18,7 @@
 from torch.nn import Module
 from torch.optim.lr_scheduler import _LRScheduler
 from torch.optim.optimizer import Optimizer
+{% includes "_handlers.py" %}
 
 
 # we can use `idist.auto_model` to handle distributed configurations

From aade4ac9e22d30c62b8386435011d5a7f5f8dffb Mon Sep 17 00:00:00 2001
From: ydcjeff
Date: Thu, 8 Apr 2021 20:22:29 +0630
Subject: [PATCH 2/2] fix: includes -> include

---
 templates/gan/utils.py                  | 2 +-
 templates/image_classification/utils.py | 2 +-
 templates/single/utils.py               | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/templates/gan/utils.py b/templates/gan/utils.py
index 4b232ecb..9d958fc1 100644
--- a/templates/gan/utils.py
+++ b/templates/gan/utils.py
@@ -21,7 +21,7 @@
 from torch.optim.optimizer import Optimizer
 
 from models import Generator, Discriminator
 
-{% includes "_handlers.py" %}
+{% include "_handlers.py" %}
 
 # we can use `idist.auto_model` to handle distributed configurations
diff --git a/templates/image_classification/utils.py b/templates/image_classification/utils.py
index eba3ff89..736e6ebb 100644
--- a/templates/image_classification/utils.py
+++ b/templates/image_classification/utils.py
@@ -21,7 +21,7 @@
 from torch.optim import Optimizer, SGD
 
 from models import get_model
 
-{% includes "_handlers.py" %}
+{% include "_handlers.py" %}
 
 
diff --git a/templates/single/utils.py b/templates/single/utils.py
index 7dab530f..22d36460 100644
--- a/templates/single/utils.py
+++ b/templates/single/utils.py
@@ -18,7 +18,7 @@
 from torch.nn import Module
 from torch.optim.lr_scheduler import _LRScheduler
 from torch.optim.optimizer import Optimizer
-{% includes "_handlers.py" %}
+{% include "_handlers.py" %}
 
 
 # we can use `idist.auto_model` to handle distributed configurations
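
Note on the second commit: Jinja2 has no `includes` tag, only `include`, so the spelling introduced in PATCH 1/2 would fail as soon as the template is compiled. Below is a minimal sketch of the difference, using an in-memory DictLoader and hypothetical template names (not the repository's actual build code):

    # Minimal sketch: `{% include %}` splices the rendered child template in
    # place, while the misspelled `{% includes %}` is an unknown tag and
    # raises TemplateSyntaxError when the template is compiled.
    from jinja2 import DictLoader, Environment, TemplateSyntaxError

    env = Environment(
        loader=DictLoader(
            {
                "_handlers.py": "def get_handlers(): ...",
                "utils_ok.py": '{% include "_handlers.py" %}',
                "utils_bad.py": '{% includes "_handlers.py" %}',
            }
        )
    )

    # The fixed spelling renders the shared handlers code into utils.py.
    print(env.get_template("utils_ok.py").render())

    # The pre-fix spelling never gets as far as rendering.
    try:
        env.get_template("utils_bad.py")
    except TemplateSyntaxError as exc:
        print("broken tag:", exc.message)

The first commit's switch from per-template `{% extends %}` wrappers to a single `{% include %}` in each utils.py relies on the same mechanism: `include` simply splices the rendered `_handlers.py` at that point, which is why the standalone handlers.py files could be deleted.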