Skip to content

[enhance] Increase the coverage #336

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 89 additions & 0 deletions test/test_pipeline/test_base_component.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import CategoricalHyperparameter, UniformIntegerHyperparameter

import pytest

from autoPyTorch.pipeline.components.base_component import ThirdPartyComponents, autoPyTorchComponent


class DummyComponentRequiredFailuire(autoPyTorchComponent):
    """Dummy component that deliberately violates the required-properties contract.

    It declares ``'required'`` in ``_required_properties`` but omits that key
    from ``get_properties``, so ``ThirdPartyComponents.add_component`` must
    raise a ``ValueError`` complaining about the missing property.
    """

    _required_properties = {'required'}

    def __init__(self, random_state=None):
        self.fitted = False
        # Keep the constructor argument instead of silently discarding it.
        self.random_state = random_state
        self._cs_updates = {}

    def fit(self, X, y):
        """Mark the component as fitted and return self (fluent API)."""
        self.fitted = True
        return self

    @staticmethod
    def get_properties(dataset_properties=None):
        # NOTE: must be a staticmethod — the original definition took no
        # ``self`` and only worked because it was invoked on the class object;
        # calling it on an instance would have bound ``self`` to
        # ``dataset_properties``.
        # Deliberately does NOT include the 'required' key declared above.
        return {"name": 'DummyComponentRequiredFailuire',
                "shortname": "Dummy"}


class DummyComponentExtraPropFailuire(autoPyTorchComponent):
    """Dummy component that reports a property it must not have.

    ``get_properties`` returns the extra key ``'must_not_be_there'``, so
    ``ThirdPartyComponents.add_component`` must raise a ``ValueError``
    complaining about the unexpected property.
    """

    def __init__(self, random_state=None):
        self.fitted = False
        # Keep the constructor argument instead of silently discarding it.
        self.random_state = random_state
        self._cs_updates = {}

    def fit(self, X, y):
        """Mark the component as fitted and return self (fluent API)."""
        self.fitted = True
        return self

    @staticmethod
    def get_properties(dataset_properties=None):
        # NOTE: must be a staticmethod — the original definition took no
        # ``self`` and only worked when invoked on the class object.
        # 'must_not_be_there' is the deliberately forbidden extra property.
        return {"name": 'DummyComponentExtraPropFailuire',
                "shortname": 'Dummy',
                "must_not_be_there": True}


class DummyComponent(autoPyTorchComponent):
    """Well-formed dummy component used as the success fixture.

    Exposes two hyperparameters (``a``: int in [10, 100], ``b``: categorical
    colour) so the ``set_hyperparameters`` tests can sample valid and invalid
    configurations against it.
    """

    def __init__(self, a=0, b='orange', random_state=None):
        self.a = a
        self.b = b
        self.fitted = False
        self.random_state = random_state
        self._cs_updates = {}

    def get_hyperparameter_search_space(self, dataset_properties=None):
        """Return the component's two-hyperparameter configuration space."""
        cs = ConfigurationSpace()
        a = UniformIntegerHyperparameter('a', lower=10, upper=100, log=False)
        b = CategoricalHyperparameter('b', choices=['red', 'green', 'blue'])
        cs.add_hyperparameters([a, b])
        return cs

    def fit(self, X, y):
        """Mark the component as fitted and return self (fluent API)."""
        self.fitted = True
        return self

    @staticmethod
    def get_properties(dataset_properties=None):
        # NOTE: must be a staticmethod — the original definition took no
        # ``self`` and only worked when invoked on the class object.
        return {"name": 'DummyComponent',
                "shortname": 'Dummy'}


def test_third_party_component_failure():
    """add_component must reject malformed components with specific errors."""
    third_party = ThirdPartyComponents(autoPyTorchComponent)

    # (candidate, expected exception, expected message pattern) — order matters
    # only insofar as it mirrors the original test's execution order.
    failure_cases = [
        (DummyComponentRequiredFailuire,
         ValueError,
         r"Property required not specified for .*"),
        (DummyComponentExtraPropFailuire,
         ValueError,
         r"Property must_not_be_there must not be specified for algorithm .*"),
        (1,
         TypeError,
         r"add_component works only with a subclass of .*"),
    ]

    for candidate, expected_error, pattern in failure_cases:
        with pytest.raises(expected_error, match=pattern):
            third_party.add_component(candidate)


def test_set_hyperparameters_not_found_failure():
    """set_hyperparameters must reject unknown hyperparameters and init params."""
    component = DummyComponent()
    search_space = component.get_hyperparameter_search_space()

    # Sample a valid configuration first, then widen the space with an extra
    # hyperparameter 'c' that the component does not know about.
    valid_config = search_space.sample_configuration()
    search_space.add_hyperparameter(CategoricalHyperparameter('c', choices=[1, 2]))
    invalid_config = search_space.sample_configuration()

    unknown_hp_pattern = (r"Cannot set hyperparameter c for autoPyTorch.pipeline "
                          r"DummyComponent because the hyperparameter does not exist.")
    with pytest.raises(ValueError, match=unknown_hp_pattern):
        component.set_hyperparameters(invalid_config)

    unknown_init_pattern = (r"Cannot set init param r for autoPyTorch.pipeline "
                            r"DummyComponent because the init param does not exist.")
    with pytest.raises(ValueError, match=unknown_init_pattern):
        component.set_hyperparameters(valid_config, init_params={'r': 1})
11 changes: 10 additions & 1 deletion test/test_pipeline/test_tabular_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,10 @@
import pytest

import torch
from torch.optim.lr_scheduler import _LRScheduler

from autoPyTorch.pipeline.components.setup.early_preprocessor.utils import get_preprocess_transforms
from autoPyTorch.pipeline.components.setup.lr_scheduler.NoScheduler import NoScheduler
from autoPyTorch.pipeline.tabular_classification import TabularClassificationPipeline
from autoPyTorch.utils.common import FitRequirement
from autoPyTorch.utils.hyperparameter_search_space_update import HyperparameterSearchSpaceUpdates, \
Expand Down Expand Up @@ -223,6 +225,7 @@ def test_network_optimizer_lr_handshake(self, fit_dictionary_tabular):
# No error when network is passed
X = pipeline.named_steps['optimizer'].fit(X, None).transform(X)
assert 'optimizer' in X
assert isinstance(pipeline.named_steps['optimizer'].choice.get_optimizer(), torch.optim.Optimizer)

# Then fitting a optimizer should fail if no network:
assert 'lr_scheduler' in pipeline.named_steps.keys()
Expand All @@ -234,7 +237,13 @@ def test_network_optimizer_lr_handshake(self, fit_dictionary_tabular):

# No error when network is passed
X = pipeline.named_steps['lr_scheduler'].fit(X, None).transform(X)
assert 'optimizer' in X
assert 'lr_scheduler' in X
if isinstance(pipeline.named_steps['lr_scheduler'].choice, NoScheduler):
pytest.skip("This scheduler does not support `get_scheduler`")
lr_scheduler = pipeline.named_steps['lr_scheduler'].choice.get_scheduler()
if isinstance(lr_scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
pytest.skip("This scheduler is not a child of _LRScheduler")
assert isinstance(lr_scheduler, _LRScheduler)

def test_get_fit_requirements(self, fit_dictionary_tabular):
dataset_properties = {'numerical_columns': [], 'categorical_columns': [],
Expand Down