Skip to content

Change default Namespace to Omegaconf and new method to save config-lock.yaml for reproducibility #292

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
1 change: 1 addition & 0 deletions scripts/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ pytest
transformers
datasets
tensorboard
omegaconf
2 changes: 2 additions & 0 deletions src/templates/template-common/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ num_workers: 4
max_epochs: 20
use_amp: false
debug: false
train_epoch_length: null
eval_epoch_length: null

#::: if (it.dist === 'spawn') { :::#
# distributed spawn
Expand Down
1 change: 1 addition & 0 deletions src/templates/template-common/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ torch>=1.10.2
torchvision>=0.11.3
pytorch-ignite>=0.4.8
pyyaml
omegaconf

#::: if (['neptune', 'polyaxon'].includes(it.logger)) { :::#

Expand Down
10 changes: 10 additions & 0 deletions src/templates/template-common/test_all.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
def test_save_config():
    """save_config must write a config-lock.yaml identical to the source config."""
    # OmegaConf.load accepts a path directly, so no explicit open() is needed.
    original = OmegaConf.load("./config.yaml")

    save_config(original, "./")

    reloaded = OmegaConf.load("./config-lock.yaml")
    assert original == reloaded
20 changes: 10 additions & 10 deletions src/templates/template-common/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

import ignite.distributed as idist
import torch
import yaml
from ignite.contrib.engines import common
from ignite.engine import Engine

Expand Down Expand Up @@ -35,6 +34,7 @@

#::: } :::#
from ignite.utils import setup_logger
from omegaconf import DictConfig, OmegaConf


def get_default_parser():
Expand All @@ -57,17 +57,11 @@ def setup_config(parser=None):
args = parser.parse_args()
config_path = args.config

with open(config_path, "r") as f:
config = yaml.safe_load(f.read())
config = OmegaConf.load(config_path)

optional_attributes = ["train_epoch_length", "eval_epoch_length"]
for attr in optional_attributes:
config[attr] = config.get(attr, None)
config.backend = args.backend

for k, v in config.items():
setattr(args, k, v)

return args
return DictConfig(config)


def log_metrics(engine: Engine, tag: str) -> None:
Expand Down Expand Up @@ -138,6 +132,12 @@ def setup_output_dir(config: Any, rank: int) -> Path:
return Path(idist.broadcast(config.output_dir, src=0))


def save_config(config, output_dir):
    """Write ``config`` to ``<output_dir>/config-lock.yaml`` so a run can be reproduced later."""
    lock_path = f"{output_dir}/config-lock.yaml"
    with open(lock_path, "w") as f:
        OmegaConf.save(config, f)


def setup_logging(config: Any) -> Logger:
"""Setup logger with `ignite.utils.setup_logger()`.

Expand Down
5 changes: 2 additions & 3 deletions src/templates/template-text-classification/main.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os
from pprint import pformat
from shutil import copy
from typing import Any

import ignite.distributed as idist
Expand All @@ -25,7 +24,7 @@ def run(local_rank: int, config: Any):
# create output folder and copy config file to output dir
config.output_dir = setup_output_dir(config, rank)
if rank == 0:
copy(config.config, f"{config.output_dir}/config-lock.yaml")
save_config(config, config.output_dir)

    # download datasets and create dataloaders
dataloader_train, dataloader_eval = setup_data(config)
Expand Down Expand Up @@ -69,7 +68,7 @@ def run(local_rank: int, config: Any):
# setup engines logger with python logging
# print training configurations
logger = setup_logging(config)
logger.info("Configuration: \n%s", pformat(vars(config)))
logger.info("Configuration: \n%s", pformat(config))
trainer.logger = evaluator.logger = logger

trainer.add_event_handler(Events.ITERATION_COMPLETED, lr_scheduler)
Expand Down
5 changes: 5 additions & 0 deletions src/templates/template-text-classification/test_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@
import ignite.distributed as idist
import pytest
from data import setup_data
from omegaconf import OmegaConf
from torch import nn, optim
from torch.functional import Tensor
from torch.utils.data import DataLoader
from utils import save_config


def set_up():
Expand Down Expand Up @@ -45,3 +47,6 @@ def test_setup_data():
assert isinstance(eval_batch["attention_mask"], Tensor)
assert isinstance(eval_batch["token_type_ids"], Tensor)
assert isinstance(eval_batch["label"], Tensor)


#::= from_template_common ::#
5 changes: 2 additions & 3 deletions src/templates/template-vision-classification/main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from pprint import pformat
from shutil import copy
from typing import Any

import ignite.distributed as idist
Expand All @@ -22,7 +21,7 @@ def run(local_rank: int, config: Any):
# create output folder and copy config file to output dir
config.output_dir = setup_output_dir(config, rank)
if rank == 0:
copy(config.config, f"{config.output_dir}/config-lock.yaml")
save_config(config, config.output_dir)

    # download datasets and create dataloaders
dataloader_train, dataloader_eval = setup_data(config)
Expand Down Expand Up @@ -59,7 +58,7 @@ def run(local_rank: int, config: Any):
# setup engines logger with python logging
# print training configurations
logger = setup_logging(config)
logger.info("Configuration: \n%s", pformat(vars(config)))
logger.info("Configuration: \n%s", pformat(config))
trainer.logger = evaluator.logger = logger

trainer.add_event_handler(Events.ITERATION_COMPLETED, lr_scheduler)
Expand Down
5 changes: 5 additions & 0 deletions src/templates/template-vision-classification/test_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@
import pytest
import torch
from data import setup_data
from omegaconf import OmegaConf
from torch import nn, optim, Tensor
from torch.utils.data.dataloader import DataLoader
from trainers import setup_evaluator
from utils import save_config


def set_up():
Expand Down Expand Up @@ -48,3 +50,6 @@ def test_setup_evaluator():
evaluator = setup_evaluator(config, model, device)
evaluator.run([batch, batch])
assert isinstance(evaluator.state.output, tuple)


#::= from_template_common ::#
5 changes: 2 additions & 3 deletions src/templates/template-vision-dcgan/main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from pprint import pformat
from shutil import copy
from typing import Any

import ignite.distributed as idist
Expand All @@ -25,7 +24,7 @@ def run(local_rank: int, config: Any):
# create output folder and copy config file to output dir
config.output_dir = setup_output_dir(config, rank)
if rank == 0:
copy(config.config, f"{config.output_dir}/config-lock.yaml")
save_config(config, config.output_dir)

    # download datasets and create dataloaders
dataloader_train, dataloader_eval, num_channels = setup_data(config)
Expand Down Expand Up @@ -74,7 +73,7 @@ def run(local_rank: int, config: Any):
# setup engines logger with python logging
# print training configurations
logger = setup_logging(config)
logger.info("Configuration: \n%s", pformat(vars(config)))
logger.info("Configuration: \n%s", pformat(config))
trainer.logger = evaluator.logger = logger

#::: if (it.save_training || it.save_evaluation) { :::#
Expand Down
5 changes: 5 additions & 0 deletions src/templates/template-vision-dcgan/test_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@
import torch
from data import setup_data
from models import Discriminator, Generator
from omegaconf import OmegaConf
from torch import nn, optim, Tensor
from torch.utils.data.dataloader import DataLoader
from trainers import setup_trainer
from utils import save_config


def set_up():
Expand Down Expand Up @@ -62,3 +64,6 @@ def test_setup_trainer():
trainer = setup_trainer(config, model, model, optimizer, optimizer, loss_fn, device, None)
trainer.run([batch, batch])
assert isinstance(trainer.state.output, dict)


#::= from_template_common ::#
5 changes: 2 additions & 3 deletions src/templates/template-vision-segmentation/main.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from functools import partial
from pprint import pformat
from shutil import copy
from typing import Any, cast

import ignite.distributed as idist
Expand Down Expand Up @@ -30,7 +29,7 @@ def run(local_rank: int, config: Any):
# create output folder and copy config file to output dir
config.output_dir = setup_output_dir(config, rank)
if rank == 0:
copy(config.config, f"{config.output_dir}/config-lock.yaml")
save_config(config, config.output_dir)

    # download datasets and create dataloaders
dataloader_train, dataloader_eval = setup_data(config)
Expand Down Expand Up @@ -72,7 +71,7 @@ def run(local_rank: int, config: Any):
# setup engines logger with python logging
# print training configurations
logger = setup_logging(config)
logger.info("Configuration: \n%s", pformat(vars(config)))
logger.info("Configuration: \n%s", pformat(config))
trainer.logger = evaluator.logger = logger

if isinstance(lr_scheduler, PyTorchLRScheduler):
Expand Down
5 changes: 5 additions & 0 deletions src/templates/template-vision-segmentation/test_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,10 @@

import pytest
from data import setup_data
from omegaconf import OmegaConf
from torch import Tensor
from torch.utils.data.dataloader import DataLoader
from utils import save_config


@pytest.mark.skipif(os.getenv("RUN_SLOW_TESTS", 0) == 0, reason="Skip slow tests")
Expand All @@ -26,3 +28,6 @@ def test_setup_data():
assert isinstance(eval_batch["mask"], Tensor)
assert eval_batch["image"].ndim == 4
assert eval_batch["mask"].ndim == 3


#::= from_template_common ::#