
Modify restart

IPOP

Bases: ModifyingRestart

Source code in evotorch/algorithms/restarter/modify_restart.py
class IPOP(ModifyingRestart):
    def __init__(
        self,
        problem: Problem,
        algorithm_class: Type[SearchAlgorithm],
        algorithm_args: dict = {},
        min_fitness_stdev: float = 1e-9,
        popsize_multiplier: float = 2,
    ):
        """IPOP restart, terminates algorithm when minimum standard deviation in fitness values is hit, multiplies the population size in that case
        References:
            Glasmachers, Tobias, and Oswin Krause.
            "The hessian estimation evolution strategy."
            PPSN 2020
        Args:
            problem (Problem): A Problem to solve
            algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
            algorithm_args (dict): Arguments to pass to the search algorithm on restart
            min_fitness_stdev (float): The minimum standard deviation in fitnesses; going below this will trigger a restart
            popsize_multiplier (float): A multiplier on the population size within algorithm_args
        """
        super().__init__(problem, algorithm_class, algorithm_args)

        self.min_fitness_stdev = min_fitness_stdev
        self.popsize_multiplier = popsize_multiplier

    def _search_algorithm_terminated(self) -> bool:
        # Additional check on standard deviation of fitnesses of population
        if self.search_algorithm.population.evals.std() < self.min_fitness_stdev:
            return True
        return super()._search_algorithm_terminated()

    def _modify_algorithm_args(self) -> None:
        # Only modify arguments if this isn't the first restart
        if self.num_restarts >= 1:
            new_args = deepcopy(self._algorithm_args)
            # Multiply population size
            new_args["popsize"] = int(self.popsize_multiplier * len(self.search_algorithm.population))
            self._algorithm_args = new_args

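A minimal usage sketch (not part of the library documentation): it assumes the standard evotorch Problem and CMAES constructors and that the restarter exposes the usual SearchAlgorithm.run interface; the sphere fitness function and all parameter values below are illustrative only.

import torch

from evotorch import Problem
from evotorch.algorithms import CMAES
from evotorch.algorithms.restarter.modify_restart import IPOP


def sphere(x: torch.Tensor) -> torch.Tensor:
    # Toy fitness function used only for illustration
    return torch.sum(x ** 2)


problem = Problem("min", sphere, solution_length=10, initial_bounds=(-1.0, 1.0))

# Wrap CMAES in an IPOP restarter: whenever the standard deviation of the
# fitness values drops below min_fitness_stdev, CMAES is restarted with the
# population size multiplied by popsize_multiplier.
searcher = IPOP(
    problem,
    algorithm_class=CMAES,
    algorithm_args={"stdev_init": 0.5, "popsize": 10},
    min_fitness_stdev=1e-9,
    popsize_multiplier=2,
)

searcher.run(1000)  # assumes the Restart wrapper provides SearchAlgorithm.run
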
__init__(problem, algorithm_class, algorithm_args={}, min_fitness_stdev=1e-09, popsize_multiplier=2)

IPOP restart: terminates the wrapped algorithm when the standard deviation of the fitness values falls below a minimum threshold, and multiplies the population size on each restart.

References:
    Glasmachers, Tobias, and Oswin Krause. "The hessian estimation evolution strategy." PPSN 2020

Args:
    problem (Problem): A Problem to solve
    algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
    algorithm_args (dict): Arguments to pass to the search algorithm on restart
    min_fitness_stdev (float): The minimum standard deviation in fitnesses; going below this will trigger a restart
    popsize_multiplier (float): A multiplier on the population size within algorithm_args

Source code in evotorch/algorithms/restarter/modify_restart.py
def __init__(
    self,
    problem: Problem,
    algorithm_class: Type[SearchAlgorithm],
    algorithm_args: dict = {},
    min_fitness_stdev: float = 1e-9,
    popsize_multiplier: float = 2,
):
    """IPOP restart, terminates algorithm when minimum standard deviation in fitness values is hit, multiplies the population size in that case
    References:
        Glasmachers, Tobias, and Oswin Krause.
        "The hessian estimation evolution strategy."
        PPSN 2020
    Args:
        problem (Problem): A Problem to solve
        algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
        algorithm_args (dict): Arguments to pass to the search algorithm on restart
        min_fitness_stdev (float): The minimum standard deviation in fitnesses; going below this will trigger a restart
        popsize_multiplier (float): A multiplier on the population size within algorithm_args
    """
    super().__init__(problem, algorithm_class, algorithm_args)

    self.min_fitness_stdev = min_fitness_stdev
    self.popsize_multiplier = popsize_multiplier

ModifyingRestart

Bases: Restart

Source code in evotorch/algorithms/restarter/modify_restart.py
class ModifyingRestart(Restart):
    def _modify_algorithm_args(self) -> None:
        """Modify the algorithm arguments on restart"""
        pass

    def _restart(self) -> None:
        """Restart the search algorithm"""
        self._modify_algorithm_args()
        return super()._restart()