Skip to content
Open
2 changes: 2 additions & 0 deletions baybe/kernels/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
"""

from baybe.kernels.basic import (
IndexKernel,
LinearKernel,
MaternKernel,
PeriodicKernel,
Expand All @@ -18,6 +19,7 @@

__all__ = [
"AdditiveKernel",
"IndexKernel",
"LinearKernel",
"MaternKernel",
"PeriodicKernel",
Expand Down
10 changes: 7 additions & 3 deletions baybe/parameters/fidelity.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
validate_is_finite,
validate_unique_values,
)
from baybe.settings import active_settings
from baybe.utils.conversion import nonstring_to_tuple
from baybe.utils.numerical import DTypeFloatNumpy


def _convert_zeta(
Expand Down Expand Up @@ -107,7 +107,9 @@ def values(self) -> tuple[str | bool, ...]:
@cached_property
def comp_df(self) -> pd.DataFrame:
    """The computational representation of the parameter.

    Each value is encoded by its integer position in ``self.values``, stored
    in a single column named after the parameter and cast to the active
    float dtype.
    """
    # NOTE: the diff paste contained both the pre- and post-change argument
    # lines; this is the post-change (active_settings-based) version.
    return pd.DataFrame(
        range(len(self.values)),
        dtype=active_settings.DTypeFloatNumpy,
        columns=[self.name],
    )


Expand Down Expand Up @@ -159,5 +161,7 @@ def values(self) -> tuple[float, ...]:
@cached_property
def comp_df(self) -> pd.DataFrame:
    """The computational representation of the parameter.

    The numerical parameter values serve both as the column data (column
    named after the parameter) and as the frame index, cast to the active
    float dtype.
    """
    return pd.DataFrame(
        {self.name: self.values},
        index=self.values,
        dtype=active_settings.DTypeFloatNumpy,
    )
108 changes: 104 additions & 4 deletions baybe/searchspace/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
from baybe.constraints.base import Constraint
from baybe.parameters import TaskParameter
from baybe.parameters.base import Parameter
from baybe.parameters.fidelity import (
CategoricalFidelityParameter,
NumericalDiscreteFidelityParameter,
)
from baybe.searchspace.continuous import SubspaceContinuous
from baybe.searchspace.discrete import (
MemorySize,
Expand Down Expand Up @@ -48,6 +52,29 @@ class SearchSpaceType(Enum):
"""Flag for hybrid search spaces resp. compatibility with hybrid search spaces."""


class SearchSpaceTaskType(Enum):
    """Enum class for the different task types of a search space."""

    SINGLETASK = "SINGLETASK"
    """Flag for search spaces with a single task, meaning no task parameter."""

    CATEGORICALMULTITASK = "CATEGORICALMULTITASK"
    """Flag for search spaces with a categorical task parameter."""


class SearchSpaceFidelityType(Enum):
    """Enum class for the different fidelity types of a search space."""

    SINGLEFIDELITY = "SINGLEFIDELITY"
    """Flag for search spaces with a single fidelity, meaning no fidelity parameter."""

    NUMERICALDISCRETEMULTIFIDELITY = "NUMERICALDISCRETEMULTIFIDELITY"
    """Flag for search spaces with a discrete numerical (ordered) fidelity parameter."""

    CATEGORICALMULTIFIDELITY = "CATEGORICALMULTIFIDELITY"
    """Flag for search spaces with a categorical (unordered) fidelity parameter."""


@define
class SearchSpace(SerialMixin):
"""Class for managing the overall search space.
Expand Down Expand Up @@ -258,15 +285,32 @@ def _task_parameter(self) -> TaskParameter | None:
if not params:
return None

assert len(params) == 1 # currently ensured by parameter validation step
return params[0]

@property
def _fidelity_parameter(
    self,
) -> NumericalDiscreteFidelityParameter | CategoricalFidelityParameter | None:
    """The (single) fidelity parameter of the space, if it exists."""
    # Currently private, see comment above
    fidelity_types = (
        NumericalDiscreteFidelityParameter,
        CategoricalFidelityParameter,
    )

    # Return the first matching parameter, or None if there is none
    return next(
        (p for p in self.parameters if isinstance(p, fidelity_types)), None
    )

@property
def task_idx(self) -> int | None:
"""The column index of the task parameter in computational representation."""
"""Column index of the task parameter in computational representation."""
if (task_param := self._task_parameter) is None:
return None
# TODO[11611]: The current approach has three limitations:
# TODO [11611]: The current approach has three limitations:
# 1. It matches by column name and thus assumes that the parameter name
# is used as the column name.
# 2. It relies on the current implementation detail that discrete parameters
Expand All @@ -275,6 +319,14 @@ def task_idx(self) -> int | None:
# --> Fix this when refactoring the data
return cast(int, self.discrete.comp_rep.columns.get_loc(task_param.name))

@property
def fidelity_idx(self) -> int | None:
"""Column index of the fidelity parameter in computational representation."""
if (fidelity_param := self._fidelity_parameter) is None:
return None
# See TODO [11611] above
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please remove the number and directly refer to the corresponding function, that is, have it as # TODO: See comment in tasks_idx property

return cast(int, self.discrete.comp_rep.columns.get_loc(fidelity_param.name))

@property
def n_tasks(self) -> int:
"""The number of tasks encoded in the search space."""
Expand All @@ -287,6 +339,54 @@ def n_tasks(self) -> int:
return 1
return len(task_param.values)

@property
def n_fidelities(self) -> int:
    """The number of fidelities encoded in the search space."""
    # TODO: See comment in n_tasks property
    if (fidelity_param := self._fidelity_parameter) is None:
        # Without a fidelity parameter, we effectively have a single fidelity
        return 1
    return len(fidelity_param.values)

@property
def task_type(self) -> SearchSpaceTaskType:
    """Return the task type of the search space.

    Raises:
        NotImplementedError: If the space has more than one task parameter.
    """
    # A list (not a generator) is required: len() on a generator raises
    # TypeError.
    task_parameters = [p for p in self.parameters if isinstance(p, TaskParameter)]

    if not task_parameters:
        return SearchSpaceTaskType.SINGLETASK
    if len(task_parameters) == 1:
        return SearchSpaceTaskType.CATEGORICALMULTITASK
    raise NotImplementedError(
        "BayBE does not currently support search "
        "spaces with multiple task parameters."
    )

def fidelity_type(self) -> SearchSpaceFidelityType:
    """Return the fidelity type of the search space.

    Raises:
        NotImplementedError: If the space has more than one fidelity parameter.
    """
    # NOTE(review): consider making this a property for consistency with the
    # task_type property — confirm no callers rely on it being a method.
    fidelity_types = (
        CategoricalFidelityParameter,
        NumericalDiscreteFidelityParameter,
    )

    # A list (not a generator) is required: len() and indexing on a generator
    # fail at runtime. Also avoid shadowing the types tuple above.
    fidelity_parameters = [
        p for p in self.parameters if isinstance(p, fidelity_types)
    ]

    if not fidelity_parameters:
        return SearchSpaceFidelityType.SINGLEFIDELITY
    if len(fidelity_parameters) == 1:
        if isinstance(fidelity_parameters[0], CategoricalFidelityParameter):
            return SearchSpaceFidelityType.CATEGORICALMULTIFIDELITY
        # The filter above guarantees the only remaining possibility:
        return SearchSpaceFidelityType.NUMERICALDISCRETEMULTIFIDELITY
    raise NotImplementedError(
        "BayBE does not currently support search "
        "spaces with multiple fidelity parameters."
    )

def get_comp_rep_parameter_indices(self, name: str, /) -> tuple[int, ...]:
"""Find a parameter's column indices in the computational representation.

Expand Down Expand Up @@ -386,7 +486,7 @@ def transform(

@property
def constraints_augmentable(self) -> tuple[Constraint, ...]:
    """The search space constraints that can be considered during augmentation."""
    return tuple(c for c in self.constraints if c.eval_during_augmentation)

def get_parameters_by_name(self, names: Sequence[str]) -> tuple[Parameter, ...]:
Expand Down
5 changes: 1 addition & 4 deletions baybe/surrogates/bandit.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from __future__ import annotations

import gc
from typing import TYPE_CHECKING, Any, ClassVar
from typing import TYPE_CHECKING, Any

from attrs import define, field
from typing_extensions import override
Expand All @@ -29,9 +29,6 @@
class BetaBernoulliMultiArmedBanditSurrogate(Surrogate):
"""A multi-armed bandit model with Bernoulli likelihood and beta prior."""

supports_transfer_learning: ClassVar[bool] = False
# See base class.

prior: BetaPrior = field(factory=lambda: BetaPrior(1, 1))
"""The beta prior for the win rates of the bandit arms. Uniform by default."""

Expand Down
19 changes: 18 additions & 1 deletion baybe/surrogates/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,14 @@ def to_botorch(self) -> Model:
class Surrogate(ABC, SurrogateProtocol, SerialMixin):
"""Abstract base class for all surrogate models."""

supports_transfer_learning: ClassVar[bool]
supports_transfer_learning: ClassVar[bool] = False
"""Class variable encoding whether or not the surrogate supports transfer
learning."""

supports_multi_fidelity: ClassVar[bool] = False
"""Class variable encoding whether or not the surrogate supports multi fidelity
Bayesian optimization."""

supports_multi_output: ClassVar[bool] = False
"""Class variable encoding whether or not the surrogate is multi-output
compatible."""
Expand Down Expand Up @@ -428,6 +432,14 @@ def fit(
f"support transfer learning."
)

# Check if multi fidelity capabilities are needed
if (searchspace.n_fidelities > 1) and (not self.supports_multi_fidelity):
raise ValueError(
f"The search space contains fidelity parameters but the selected "
f"surrogate model type ({self.__class__.__name__}) does not "
f"support multi fidelity Bayesian optimisation."
)

# Block partial measurements
handle_missing_values(measurements, [t.name for t in objective.targets])

Expand Down Expand Up @@ -472,6 +484,11 @@ def __str__(self) -> str:
self.supports_transfer_learning,
single_line=True,
),
to_string(
"Supports Multi Fidelity",
self.supports_multi_fidelity,
single_line=True,
),
]
return to_string(self.__class__.__name__, *fields)

Expand Down
5 changes: 1 addition & 4 deletions baybe/surrogates/custom.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from __future__ import annotations

import gc
from typing import TYPE_CHECKING, Any, ClassVar, NoReturn
from typing import TYPE_CHECKING, Any, NoReturn

import cattrs
from attrs import define, field, validators
Expand Down Expand Up @@ -67,9 +67,6 @@ class CustomONNXSurrogate(IndependentGaussianSurrogate):
Note that these surrogates cannot be retrained.
"""

supports_transfer_learning: ClassVar[bool] = False
# See base class.

onnx_input_name: str = field(validator=validators.instance_of(str))
"""The input name used for constructing the ONNX str."""

Expand Down
20 changes: 18 additions & 2 deletions baybe/surrogates/gaussian_process/components/kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from baybe.kernels.base import Kernel
from baybe.kernels.composite import ProductKernel
from baybe.parameters.categorical import TaskParameter
from baybe.parameters.fidelity import CategoricalFidelityParameter
from baybe.parameters.selector import (
ParameterSelectorProtocol,
TypeSelector,
Expand Down Expand Up @@ -79,15 +80,30 @@ def _default_base_kernel_factory(self) -> KernelFactoryProtocol:
BayBENumericalKernelFactory,
)

return BayBENumericalKernelFactory(TypeSelector((TaskParameter,), exclude=True))
return BayBENumericalKernelFactory(
TypeSelector(
(
TaskParameter,
CategoricalFidelityParameter,
),
exclude=True,
)
)

@task_kernel_factory.default
def _default_task_kernel_factory(self) -> KernelFactoryProtocol:
    """Create the default kernel factory for task and fidelity parameters."""
    # NOTE: local import — presumably to avoid a circular import; confirm
    from baybe.surrogates.gaussian_process.presets.baybe import (
        BayBETaskKernelFactory,
    )

    # The selector now also covers categorical fidelity parameters.
    return BayBETaskKernelFactory(
        TypeSelector(
            (
                TaskParameter,
                CategoricalFidelityParameter,
            )
        )
    )

@override
def __call__(
Expand Down
Loading
Loading