Index
This namespace contains the implementations of various restart mechanisms.
modify_restart

IPOP (ModifyingRestart)
    Source code in evotorch/algorithms/restarter/modify_restart.py
```python
class IPOP(ModifyingRestart):
    def __init__(
        self,
        problem: Problem,
        algorithm_class: Type[SearchAlgorithm],
        algorithm_args: dict = {},
        min_fitness_stdev: float = 1e-9,
        popsize_multiplier: float = 2,
    ):
        """IPOP restart, terminates algorithm when minimum standard deviation in fitness values is hit, multiplies the population size in that case

        References:
            Glasmachers, Tobias, and Oswin Krause.
            "The hessian estimation evolution strategy."
            PPSN 2020

        Args:
            problem (Problem): A Problem to solve
            algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
            algorithm_args (dict): Arguments to pass to the search algorithm on restart
            min_fitness_stdev (float): The minimum standard deviation in fitnesses; going below this will trigger a restart
            popsize_multiplier (float): A multiplier on the population size within algorithm_args
        """
        super().__init__(problem, algorithm_class, algorithm_args)
        self.min_fitness_stdev = min_fitness_stdev
        self.popsize_multiplier = popsize_multiplier

    def _search_algorithm_terminated(self) -> bool:
        # Additional check on standard deviation of fitnesses of population
        if self.search_algorithm.population.evals.std() < self.min_fitness_stdev:
            return True
        return super()._search_algorithm_terminated()

    def _modify_algorithm_args(self) -> None:
        # Only modify arguments if this isn't the first restart
        if self.num_restarts >= 1:
            new_args = deepcopy(self._algorithm_args)
            # Multiply population size
            new_args["popsize"] = int(self.popsize_multiplier * len(self.search_algorithm.population))
            self._algorithm_args = new_args
```
__init__(self, problem, algorithm_class, algorithm_args={}, min_fitness_stdev=1e-09, popsize_multiplier=2) (special)
IPOP restart: terminates the wrapped algorithm when the standard deviation of the population's fitness values falls below a minimum threshold, and multiplies the population size at each restart.
References

Glasmachers, Tobias, and Oswin Krause. "The Hessian Estimation Evolution Strategy." PPSN 2020.
Parameters:
| Name | Type | Description | Default | 
|---|---|---|---|
| problem | Problem | A Problem to solve | required | 
| algorithm_class | Type[SearchAlgorithm] | The class of the search algorithm to restart | required | 
| algorithm_args | dict | Arguments to pass to the search algorithm on restart | {} | 
| min_fitness_stdev | float | The minimum standard deviation in fitnesses; going below this will trigger a restart | 1e-09 | 
| popsize_multiplier | float | A multiplier on the population size within algorithm_args | 2 | 
Source code in evotorch/algorithms/restarter/modify_restart.py
```python
def __init__(
    self,
    problem: Problem,
    algorithm_class: Type[SearchAlgorithm],
    algorithm_args: dict = {},
    min_fitness_stdev: float = 1e-9,
    popsize_multiplier: float = 2,
):
    """IPOP restart, terminates algorithm when minimum standard deviation in fitness values is hit, multiplies the population size in that case

    References:
        Glasmachers, Tobias, and Oswin Krause.
        "The hessian estimation evolution strategy."
        PPSN 2020

    Args:
        problem (Problem): A Problem to solve
        algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
        algorithm_args (dict): Arguments to pass to the search algorithm on restart
        min_fitness_stdev (float): The minimum standard deviation in fitnesses; going below this will trigger a restart
        popsize_multiplier (float): A multiplier on the population size within algorithm_args
    """
    super().__init__(problem, algorithm_class, algorithm_args)
    self.min_fitness_stdev = min_fitness_stdev
    self.popsize_multiplier = popsize_multiplier
```
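Below is a minimal usage sketch, not part of the documented source: it wraps evotorch's CMAES in an IPOP restarter on a simple sphere problem. The import path `evotorch.algorithms.restarter` is an assumption inferred from the file path above, and the CMAES keyword arguments (`stdev_init`, `popsize`) are assumed to be accepted by that algorithm class.

```python
# Minimal usage sketch; import paths and CMAES keyword arguments are assumptions,
# not taken from the documented source above.
import torch

from evotorch import Problem
from evotorch.algorithms import CMAES
from evotorch.algorithms.restarter import IPOP  # assumed export location
from evotorch.logging import StdOutLogger


def sphere(x: torch.Tensor) -> torch.Tensor:
    # As the search converges, fitness variance collapses, which is exactly
    # the condition that triggers an IPOP restart.
    return torch.sum(x**2)


problem = Problem("min", sphere, solution_length=30, initial_bounds=(-5.12, 5.12))

searcher = IPOP(
    problem,
    algorithm_class=CMAES,
    algorithm_args={"stdev_init": 1.0, "popsize": 20},  # assumed CMAES kwargs
    min_fitness_stdev=1e-9,
    popsize_multiplier=2,
)

StdOutLogger(searcher)
searcher.run(500)  # each generation steps the current CMAES; restarts happen automatically
```

Note that `_modify_algorithm_args` overwrites the `popsize` entry from the size of the previous population on every restart after the first, so the initial `popsize` only controls the first run.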
restart

Restart (SearchAlgorithm)
    Source code in evotorch/algorithms/restarter/restart.py
```python
class Restart(SearchAlgorithm):
    def __init__(
        self,
        problem: Problem,
        algorithm_class: Type[SearchAlgorithm],
        algorithm_args: dict = {},
        **kwargs: Any,
    ):
        """Base class for independent restarts methods

        Args:
            problem (Problem): A Problem to solve
            algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
            algorithm_args (dict): Arguments to pass to the search algorithm on restart
        """
        SearchAlgorithm.__init__(
            self,
            problem,
            search_algorithm=self._get_sa_status,
            num_restarts=self._get_num_restarts,
            algorithm_terminated=self._search_algorithm_terminated,
            **kwargs,
        )
        self._algorithm_class = algorithm_class
        self._algorithm_args = algorithm_args
        self.num_restarts = 0
        self._restart()

    def _get_sa_status(self) -> dict:
        """Status dictionary of search algorithm"""
        return self.search_algorithm.status

    def _get_num_restarts(self) -> int:
        """Number of restarts (including the first start) so far"""
        return self.num_restarts

    def _restart(self) -> None:
        """Restart the search algorithm"""
        self.search_algorithm = self._algorithm_class(self._problem, **self._algorithm_args)
        self.num_restarts += 1

    def _search_algorithm_terminated(self) -> bool:
        """Boolean flag for search algorithm terminated"""
        return self.search_algorithm.is_terminated

    def _step(self):
        # Step the search algorithm
        self.search_algorithm.step()

        # If stepping the search algorithm has reached a terminal state, restart the search algorithm
        if self._search_algorithm_terminated():
            self._restart()
```
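As a sketch of how the base class's override points can be used (this subclass is not part of the library), a custom restarter can extend `Restart` and add its own restart condition in `_search_algorithm_terminated`, much as `IPOP` adds its fitness-stdev check above. The class name, constructor arguments, and import path below are hypothetical.

```python
# Hypothetical subclass, not taken from the library: force a restart after a
# fixed number of generations, in addition to the inner algorithm's own criteria.
from evotorch.algorithms.restarter import Restart  # assumed export location


class FixedBudgetRestart(Restart):
    def __init__(self, problem, algorithm_class, algorithm_args={}, generations_per_run=50, **kwargs):
        super().__init__(problem, algorithm_class, algorithm_args, **kwargs)
        self.generations_per_run = generations_per_run

    def _restart(self) -> None:
        # Each fresh inner algorithm starts with a zeroed generation counter
        self._generations_in_run = 0
        super()._restart()

    def _step(self):
        # Count generations of the current run, then delegate to Restart._step,
        # which steps the inner algorithm and restarts it when terminated
        self._generations_in_run += 1
        super()._step()

    def _search_algorithm_terminated(self) -> bool:
        # Extra termination test on top of the inner algorithm's own criteria
        if self._generations_in_run >= self.generations_per_run:
            return True
        return super()._search_algorithm_terminated()
```

Because `Restart._step` already performs the terminate-and-restart cycle, the subclass only needs to supply the extra bookkeeping and the extra termination test.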
__init__(self, problem, algorithm_class, algorithm_args={}, **kwargs) (special)
Base class for independent restart methods.
Parameters:
| Name | Type | Description | Default | 
|---|---|---|---|
| problem | Problem | A Problem to solve | required | 
| algorithm_class | Type[SearchAlgorithm] | The class of the search algorithm to restart | required | 
| algorithm_args | dict | Arguments to pass to the search algorithm on restart | {} | 
Source code in evotorch/algorithms/restarter/restart.py
```python
def __init__(
    self,
    problem: Problem,
    algorithm_class: Type[SearchAlgorithm],
    algorithm_args: dict = {},
    **kwargs: Any,
):
    """Base class for independent restarts methods

    Args:
        problem (Problem): A Problem to solve
        algorithm_class (Type[SearchAlgorithm]): The class of the search algorithm to restart
        algorithm_args (dict): Arguments to pass to the search algorithm on restart
    """
    SearchAlgorithm.__init__(
        self,
        problem,
        search_algorithm=self._get_sa_status,
        num_restarts=self._get_num_restarts,
        algorithm_terminated=self._search_algorithm_terminated,
        **kwargs,
    )
    self._algorithm_class = algorithm_class
    self._algorithm_args = algorithm_args
    self.num_restarts = 0
    self._restart()
```
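Since the constructor above registers `search_algorithm`, `num_restarts`, and `algorithm_terminated` as status items, a restarter can also be stepped manually and inspected between generations. The sketch below reuses the assumed import locations from the IPOP example earlier; the status keys come directly from the `SearchAlgorithm.__init__` call shown above.

```python
# Manual stepping sketch; import paths are assumptions based on the file paths above.
import torch

from evotorch import Problem
from evotorch.algorithms import CMAES
from evotorch.algorithms.restarter import IPOP  # assumed export location


def sphere(x: torch.Tensor) -> torch.Tensor:
    return torch.sum(x**2)


problem = Problem("min", sphere, solution_length=10, initial_bounds=(-5.0, 5.0))
restarter = IPOP(problem, CMAES, algorithm_args={"stdev_init": 1.0})

for generation in range(200):
    restarter.step()
    # "num_restarts" counts the first start as well, so a value above 1 means
    # the fitness-stdev condition has triggered at least one restart.
    if restarter.status["num_restarts"] > 1:
        print(f"first restart after generation {generation}")
        break

# The wrapped algorithm's own status dictionary is exposed under "search_algorithm"
inner_status = restarter.status["search_algorithm"]
```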