Base optimizer

neps.optimizers.base_optimizer #

BaseOptimizer #

BaseOptimizer(
    pipeline_space: SearchSpace,
    patience: int = 50,
    logger: Logger | None = None,
    budget: int | float | None = None,
    loss_value_on_error: float | None = None,
    cost_value_on_error: float | None = None,
    learning_curve_on_error: (
        float | list[float] | None
    ) = None,
    ignore_errors=False,
)

Base sampler class. Implements all the low-level work.

Source code in neps/optimizers/base_optimizer.py
def __init__(
    self,
    pipeline_space: SearchSpace,
    patience: int = 50,
    logger: logging.Logger | None = None,
    budget: int | float | None = None,
    loss_value_on_error: float | None = None,
    cost_value_on_error: float | None = None,
    learning_curve_on_error: float | list[float] | None = None,
    ignore_errors=False,
) -> None:
    if patience < 1:
        raise ValueError("Patience should be at least 1")

    self.used_budget: float = 0.0
    self.budget = budget
    self.pipeline_space = pipeline_space
    self.patience = patience
    self.logger = logger or logging.getLogger("neps")
    self.loss_value_on_error = loss_value_on_error
    self.cost_value_on_error = cost_value_on_error
    self.learning_curve_on_error = learning_curve_on_error
    self.ignore_errors = ignore_errors
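
Below is a hedged usage sketch of what these constructor arguments configure. It uses a hypothetical concrete subclass (such as the RandomSampler sketched under get_config_and_ids below), since BaseOptimizer itself is abstract, and the comments on the error-handling arguments describe assumed semantics based on the get_loss/get_cost helpers documented further down.

opt = RandomSampler(              # hypothetical concrete subclass of BaseOptimizer
    pipeline_space=space,         # a neps SearchSpace built elsewhere
    patience=50,                  # must be >= 1, otherwise __init__ raises ValueError
    budget=100.0,                 # optional total budget; opt.used_budget starts at 0.0
    loss_value_on_error=1.0,      # substitute loss for evaluations that errored
    cost_value_on_error=0.0,      # substitute cost for evaluations that errored
    ignore_errors=False,          # if True, errored results would be ignored instead
)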

get_config_and_ids abstractmethod #

get_config_and_ids() -> tuple[SearchSpace, str, str | None]

Sample a new configuration

RETURNS DESCRIPTION

config: serializable object representing the configuration
config_id: unique identifier for the configuration
previous_config_id: if provided, the id of a previous configuration on which this one is based

TYPE: tuple[SearchSpace, str, str | None]

Source code in neps/optimizers/base_optimizer.py
@abstractmethod
def get_config_and_ids(self) -> tuple[SearchSpace, str, str | None]:
    """Sample a new configuration

    Returns:
        config: serializable object representing the configuration
        config_id: unique identifier for the configuration
        previous_config_id: if provided, the id of a previous configuration on
            which this one is based
    """
    raise NotImplementedError
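
As a minimal sketch, a concrete subclass might implement this contract by sampling the search space independently at random. The class name, the _num_sampled counter, the SearchSpace import path, and the SearchSpace.sample() call are assumptions for illustration, not part of this module.

from neps.optimizers.base_optimizer import BaseOptimizer
from neps.search_spaces.search_space import SearchSpace  # assumed import path


class RandomSampler(BaseOptimizer):
    """Hypothetical optimizer that samples each configuration at random."""

    _num_sampled: int = 0

    def get_config_and_ids(self) -> tuple[SearchSpace, str, str | None]:
        # Draw a fresh configuration; SearchSpace.sample() is an assumed API here.
        config = self.pipeline_space.sample(patience=self.patience)
        self._num_sampled += 1
        config_id = str(self._num_sampled)
        # Nothing is warm-started from an earlier configuration, so no previous id.
        return config, config_id, None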

get_cost #

get_cost(result: str | dict | float) -> float | Any

Calls result.utils.get_cost() and passes the error handling through. Please use self.get_cost() instead of get_cost() in all optimizer classes.

Source code in neps/optimizers/base_optimizer.py
def get_cost(self, result: str | dict | float) -> float | Any:
    """Calls result.utils.get_cost() and passes the error handling through.
    Please use self.get_cost() instead of get_cost() in all optimizer classes."""
    return _get_cost(
        result,
        cost_value_on_error=self.cost_value_on_error,
        ignore_errors=self.ignore_errors,
    )
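
As a hedged sketch, a subclass might use this method to account for budget already spent across reported results. The previous_results mapping (config id to whatever the evaluation returned) is a hypothetical variable; errored results are handled according to cost_value_on_error and ignore_errors.

# opt is an instance of a concrete BaseOptimizer subclass
opt.used_budget = 0.0
for result in previous_results.values():
    cost = opt.get_cost(result)          # error handling applied inside
    if cost is not None:                 # skip results without a usable cost
        opt.used_budget += float(cost)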

get_learning_curve #

get_learning_curve(
    result: str | dict | float,
) -> float | Any

Extracts the learning curve from the result and passes the error handling through. Please use self.get_learning_curve() instead of get_learning_curve() in all optimizer classes.

Source code in neps/optimizers/base_optimizer.py
def get_learning_curve(self, result: str | dict | float) -> float | Any:
    """Calls result.utils.get_loss() and passes the error handling through.
    Please use self.get_loss() instead of get_loss() in all optimizer classes."""
    return _get_learning_curve(
        result,
        learning_curve_on_error=self.learning_curve_on_error,
        ignore_errors=self.ignore_errors,
    )
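
A hedged sketch of collecting per-configuration learning curves from reported results; previous_results and the shape of each returned curve are assumptions for illustration.

learning_curves = {
    config_id: opt.get_learning_curve(result)    # error handling applied inside
    for config_id, result in previous_results.items()
}
# e.g. take the last recorded point as a proxy for a configuration's final loss
proxy_losses = {
    cid: curve[-1]
    for cid, curve in learning_curves.items()
    if isinstance(curve, list) and curve         # skip missing or scalar substitutes
}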

get_loss #

get_loss(result: str | dict | float) -> float | Any

Calls result.utils.get_loss() and passes the error handling through. Please use self.get_loss() instead of get_loss() in all optimizer classes.

Source code in neps/optimizers/base_optimizer.py
def get_loss(self, result: str | dict | float) -> float | Any:
    """Calls result.utils.get_loss() and passes the error handling through.
    Please use self.get_loss() instead of get_loss() in all optimizer classes."""
    return _get_loss(
        result,
        loss_value_on_error=self.loss_value_on_error,
        ignore_errors=self.ignore_errors,
    )
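
A hedged sketch of selecting the incumbent (lowest loss seen so far) while letting this method apply loss_value_on_error and ignore_errors uniformly; previous_results is again a hypothetical mapping of config ids to reported results.

losses = {
    config_id: loss
    for config_id, result in previous_results.items()
    if (loss := opt.get_loss(result)) is not None   # skip results without a usable loss
}
incumbent_id = min(losses, key=losses.get) if losses else None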