62 changes: 57 additions & 5 deletions botorch_community/acquisition/discretized.py
@@ -39,7 +39,12 @@ class DiscretizedAcquistionFunction(AcquisitionFunction, ABC):
be implemented by subclasses to define the specific acquisition functions.
"""

def __init__(self, model: Model, posterior_transform: PosteriorTransform) -> None:
def __init__(
self,
model: Model,
posterior_transform: PosteriorTransform,
assume_symmetric_posterior: bool = True,
) -> None:
r"""
Initialize the DiscretizedAcquistionFunction

@@ -49,8 +54,17 @@ def __init__(self, model: Model, posterior_transform: PosteriorTransform) -> None:
The model should be a `PFNModel`.
posterior_transform: A ScalarizedPosteriorTransform that can only
indicate minimization or maximization of the objective.
assume_symmetric_posterior: If True and the task is to minimize the
objective, we simply negate the training y's. Otherwise, we apply a
proper posterior transform. Negating is not always possible, as some
models only support maximization. The assumption is not that the
posterior distribution for a particular context set is symmetric,
but that negating the y's of the context yields the posterior over
the negated y's.
"""
super().__init__(model=model)
self.set_X_pending(None)
self.assume_symmetric_posterior = assume_symmetric_posterior
self.maximize = True
if posterior_transform is not None:
unsupported_error_message = (
@@ -80,8 +94,12 @@ def forward(self, X: Tensor) -> Tensor:
A `(b)`-dim Tensor of the acquisition function at the given
design points `X`.
"""
discrete_posterior = self.model.posterior(X)
if not self.maximize:
discrete_posterior = self.model.posterior(
X,
pending_X=self.X_pending,
negate_train_ys=(not self.maximize) and self.assume_symmetric_posterior,
)
if not self.maximize and not self.assume_symmetric_posterior:
discrete_posterior.borders = -torch.flip(discrete_posterior.borders, [0])
discrete_posterior.probabilities = torch.flip(
discrete_posterior.probabilities, [-1]
@@ -124,6 +142,7 @@ def __init__(
model: Model,
best_f: Tensor,
posterior_transform: PosteriorTransform | None = None,
assume_symmetric_posterior: bool = True,
) -> None:
r"""
Initialize the DiscretizedExpectedImprovement
@@ -134,7 +153,11 @@ def __init__(
The model should be a `PFNModel`.
best_f: A tensor representing the current best observed value.
"""
super().__init__(model=model, posterior_transform=posterior_transform)
super().__init__(
model=model,
posterior_transform=posterior_transform,
assume_symmetric_posterior=assume_symmetric_posterior,
)
self.register_buffer("best_f", torch.as_tensor(best_f))

def ag_integrate(self, lower_bound: Tensor, upper_bound: Tensor) -> Tensor:
@@ -187,6 +210,30 @@ def ag_integrate(self, lower_bound: Tensor, upper_bound: Tensor) -> Tensor:
return result.clamp_min(0)


class DiscretizedNoisyExpectedImprovement(DiscretizedExpectedImprovement):
def __init__(
self,
model: Model,
posterior_transform: PosteriorTransform | None = None,
X_pending: Tensor | None = None,
) -> None:
r"""
Initialize the DiscretizedNoisyExpectedImprovement.

This acquisition function only works with models that were trained
specifically for it; `best_f` is fixed to 0.

Args:
model: A fitted model that is used to compute the posterior
distribution over the outcomes of interest.
The model should be a `PFNModel`.
posterior_transform: A ScalarizedPosteriorTransform that can only
indicate minimization or maximization of the objective.
X_pending: Points already tried, but not yet included in the
training data.
"""
super().__init__(
model=model,
posterior_transform=posterior_transform,
best_f=0.0,
)
self.set_X_pending(X_pending)


class DiscretizedProbabilityOfImprovement(DiscretizedAcquistionFunction):
r"""DiscretizedProbabilityOfImprovement is an acquisition function that
computes the probability of improvement over the current best observed value
@@ -198,6 +245,7 @@ def __init__(
model: Model,
best_f: Tensor,
posterior_transform: PosteriorTransform | None = None,
assume_symmetric_posterior: bool = True,
) -> None:
r"""
Initialize the DiscretizedProbabilityOfImprovement
@@ -209,7 +257,11 @@ def __init__(
best_f: A tensor representing the current best observed value.
"""

super().__init__(model, posterior_transform)
super().__init__(
model,
posterior_transform,
assume_symmetric_posterior=assume_symmetric_posterior,
)
self.register_buffer("best_f", torch.as_tensor(best_f))

def ag_integrate(self, lower_bound: Tensor, upper_bound: Tensor) -> Tensor:
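As a sanity check on the fallback branch in `forward` above (used when minimizing without `assume_symmetric_posterior`), here is a minimal sketch that is not part of the PR. It assumes a Riemann-style discretized posterior described by bucket `borders` and per-bucket `probabilities` (one more border than buckets); the concrete numbers are made up for illustration. It verifies that negating and reversing the borders while reversing the probabilities yields the distribution of the negated objective.

```python
import torch

# Hypothetical discretized posterior over y: `borders` has one more entry than
# `probabilities`, and probabilities[i] is the mass on [borders[i], borders[i + 1]).
borders = torch.tensor([-1.0, 0.0, 0.5, 2.0])
probabilities = torch.tensor([0.2, 0.5, 0.3])

# The fallback in `forward`: negate-and-reverse the borders, reverse the
# probabilities. The result is the discretized distribution of -y.
flipped_borders = -torch.flip(borders, [0])              # [-2.0, -0.5, 0.0, 1.0]
flipped_probabilities = torch.flip(probabilities, [-1])  # [0.3, 0.5, 0.2]

# The mass of y below a border t equals the mass of -y above -t, so
# "improvement below best_f" for minimization becomes ordinary
# "improvement above -best_f" on the flipped posterior.
t = 0.5
mass_below = probabilities[borders[1:] <= t].sum()
mass_above_flipped = flipped_probabilities[flipped_borders[:-1] >= -t].sum()
assert torch.isclose(mass_below, mass_above_flipped)  # both 0.7
```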
28 changes: 28 additions & 0 deletions botorch_community/acquisition/input_constructors.py
@@ -37,6 +37,7 @@

from botorch_community.acquisition.discretized import (
DiscretizedExpectedImprovement,
DiscretizedNoisyExpectedImprovement,
DiscretizedProbabilityOfImprovement,
)
from botorch_community.acquisition.scorebo import qSelfCorrectingBayesianOptimization
@@ -78,6 +79,33 @@ def construct_inputs_best_f(
}


@acqf_input_constructor(DiscretizedNoisyExpectedImprovement)
def construct_inputs_noisy(
model: Model,
posterior_transform: PosteriorTransform | None = None,
X_pending: Tensor | None = None,
) -> dict[str, Any]:
r"""Construct kwargs for the acquisition functions requiring `best_f`.

Args:
model: The model to be used in the acquisition function.
best_f: Threshold above (or below) which improvement is defined.
posterior_transform: The posterior transform to be used in the
acquisition function.
X_pending: Points already tried, but not yet included in the
training data.


Returns:
A dict mapping kwarg names of the constructor to values.
"""
return {
"model": model,
"posterior_transform": posterior_transform,
"X_pending": X_pending,
}


@acqf_input_constructor(
qBayesianQueryByComittee,
qBayesianVarianceReduction,
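For orientation only, a minimal sketch (not part of the PR) of how the registration above is typically consumed. It assumes that `get_acqf_input_constructor` from `botorch.acquisition.input_constructors` resolves constructors registered with `@acqf_input_constructor`, and that the `model` passed in is an already-fitted `PFNModel`.

```python
from botorch.acquisition.input_constructors import get_acqf_input_constructor
from botorch_community.acquisition.discretized import (
    DiscretizedNoisyExpectedImprovement,
)


def make_noisy_ei_acqf(model, X_pending=None):
    """Build the acquisition function via the registered input constructor.

    `model` is assumed to be an already-fitted PFNModel.
    """
    # Look up the constructor registered by @acqf_input_constructor above.
    constructor = get_acqf_input_constructor(DiscretizedNoisyExpectedImprovement)
    # The constructor returns exactly the kwargs the acquisition function accepts.
    acqf_kwargs = constructor(model=model, X_pending=X_pending)
    return DiscretizedNoisyExpectedImprovement(**acqf_kwargs)
```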