pyduino.optimization.gradient_descent
import numpy as np

from . import Optimizer, linmap


class GradientDescent(Optimizer):
    def __init__(self, population_size: int, ranges: list[tuple[float, float]], damping: float = 0.01, rng_seed: int = 0):
        r"""
        Gradient-descent optimizer over a box-constrained search space.

        This gradient descent algorithm assumes that the optimization function
        'f' is to be minimized, differentiable, and time independent:

        $$\frac{\mathrm{d} f}{\mathrm{d} t} = \frac{\partial f}{\partial \vec{x}}\frac{\mathrm{d} \vec{x}}{\mathrm{d} t}$$

        Where $\frac{\partial f}{\partial t}$ is assumed to be zero.

        Args:
            population_size (int): Number of individuals in the population.
            ranges (list of pairs): Maxima and minima that each parameter can cover.
                Example: [(0, 10), (0, 10)] for two parameters ranging from 0 to 10.
            damping (float, optional): Damping factor to avoid oscillations. Defaults to 0.01.
            rng_seed (int, optional): Seed for the random number generator. Defaults to 0.
        """
        self.population_size = population_size
        self.ranges = np.array(ranges)
        self.damping = damping
        # NOTE(review): despite its name, this attribute holds the Generator
        # built from the seed, not the seed itself. Kept as-is for backward
        # compatibility with existing callers.
        self.rng_seed = np.random.default_rng(rng_seed)

        # Derived attributes: linear maps between the unit hypercube [0, 1]^n
        # (internal representation) and the user-supplied parameter ranges.
        self.invlinmap = linmap(self.ranges, np.array([[0, 1]] * len(self.ranges)))
        self.linmap = linmap(np.array([[0, 1]] * len(self.ranges)), self.ranges)

        # Initialize the population (random position and initial momentum),
        # all drawn uniformly from [0, 1).
        self.population = self.rng_seed.random((self.population_size, len(self.ranges)))
        self.momenta = self.rng_seed.random((self.population_size, len(self.ranges)))
        self.oracle_past = self.rng_seed.random((self.population_size, 1))

    def view(self, x):
        """Map `x` from the internal domain ([0, 1]^n) to the parameter ranges."""
        return self.linmap(x)

    def view_g(self):
        """Map the whole population from the internal domain to the parameter ranges."""
        return self.linmap(self.population)

    def inverse_view(self, x):
        """Map `x` from the parameter ranges back to the internal domain."""
        return self.invlinmap(x)

    def ask_oracle(self) -> np.ndarray:
        """Delegate the oracle query to the base class."""
        return super().ask_oracle()

    def set_oracle(self, X: np.ndarray):
        """Delegate the oracle update to the base class."""
        return super().set_oracle(X)

    def init_oracle(self):
        """Seed the oracle with the current population, expressed in range units."""
        return self.set_oracle(self.view(self.population))

    def step(self, dt: float):
        """
        Move the population in the direction of the gradient.

        Args:
            dt (float): Time elapsed since the last observation.
        """
        # TODO(review): not implemented yet — intentionally a no-op.
        pass
class GradientDescent(Optimizer):
    def __init__(self, population_size: int, ranges: list[tuple[float, float]], damping: float = 0.01, rng_seed: int = 0):
        r"""
        Gradient-descent optimizer over a box-constrained search space.

        This gradient descent algorithm assumes that the optimization function
        'f' is to be minimized, differentiable, and time independent:

        $$\frac{\mathrm{d} f}{\mathrm{d} t} = \frac{\partial f}{\partial \vec{x}}\frac{\mathrm{d} \vec{x}}{\mathrm{d} t}$$

        Where $\frac{\partial f}{\partial t}$ is assumed to be zero.

        Args:
            population_size (int): Number of individuals in the population.
            ranges (list of pairs): Maxima and minima that each parameter can cover.
                Example: [(0, 10), (0, 10)] for two parameters ranging from 0 to 10.
            damping (float, optional): Damping factor to avoid oscillations. Defaults to 0.01.
            rng_seed (int, optional): Seed for the random number generator. Defaults to 0.
        """
        self.population_size = population_size
        self.ranges = np.array(ranges)
        self.damping = damping
        # NOTE(review): despite its name, this attribute holds the Generator
        # built from the seed, not the seed itself. Kept as-is for backward
        # compatibility with existing callers.
        self.rng_seed = np.random.default_rng(rng_seed)

        # Derived attributes: linear maps between the unit hypercube [0, 1]^n
        # (internal representation) and the user-supplied parameter ranges.
        self.invlinmap = linmap(self.ranges, np.array([[0, 1]] * len(self.ranges)))
        self.linmap = linmap(np.array([[0, 1]] * len(self.ranges)), self.ranges)

        # Initialize the population (random position and initial momentum),
        # all drawn uniformly from [0, 1).
        self.population = self.rng_seed.random((self.population_size, len(self.ranges)))
        self.momenta = self.rng_seed.random((self.population_size, len(self.ranges)))
        self.oracle_past = self.rng_seed.random((self.population_size, 1))

    def view(self, x):
        """Map `x` from the internal domain ([0, 1]^n) to the parameter ranges."""
        return self.linmap(x)

    def view_g(self):
        """Map the whole population from the internal domain to the parameter ranges."""
        return self.linmap(self.population)

    def inverse_view(self, x):
        """Map `x` from the parameter ranges back to the internal domain."""
        return self.invlinmap(x)

    def ask_oracle(self) -> np.ndarray:
        """Delegate the oracle query to the base class."""
        return super().ask_oracle()

    def set_oracle(self, X: np.ndarray):
        """Delegate the oracle update to the base class."""
        return super().set_oracle(X)

    def init_oracle(self):
        """Seed the oracle with the current population, expressed in range units."""
        return self.set_oracle(self.view(self.population))

    def step(self, dt: float):
        """
        Move the population in the direction of the gradient.

        Args:
            dt (float): Time elapsed since the last observation.
        """
        # TODO(review): not implemented yet — intentionally a no-op.
        pass
Abstract class for optimization algorithms
GradientDescent( population_size: int, ranges: list[float], damping: float = 0.01, rng_seed: int = 0)
def __init__(self, population_size: int, ranges: list[tuple[float, float]], damping: float = 0.01, rng_seed: int = 0):
    r"""
    Gradient-descent optimizer over a box-constrained search space.

    This gradient descent algorithm assumes that the optimization function
    'f' is to be minimized, differentiable, and time independent:

    $$\frac{\mathrm{d} f}{\mathrm{d} t} = \frac{\partial f}{\partial \vec{x}}\frac{\mathrm{d} \vec{x}}{\mathrm{d} t}$$

    Where $\frac{\partial f}{\partial t}$ is assumed to be zero.

    Args:
        population_size (int): Number of individuals in the population.
        ranges (list of pairs): Maxima and minima that each parameter can cover.
            Example: [(0, 10), (0, 10)] for two parameters ranging from 0 to 10.
        damping (float, optional): Damping factor to avoid oscillations. Defaults to 0.01.
        rng_seed (int, optional): Seed for the random number generator. Defaults to 0.
    """
    self.population_size = population_size
    self.ranges = np.array(ranges)
    self.damping = damping
    # NOTE(review): despite its name, this attribute holds the Generator
    # built from the seed, not the seed itself. Kept as-is for backward
    # compatibility with existing callers.
    self.rng_seed = np.random.default_rng(rng_seed)

    # Derived attributes: linear maps between the unit hypercube [0, 1]^n
    # (internal representation) and the user-supplied parameter ranges.
    self.invlinmap = linmap(self.ranges, np.array([[0, 1]] * len(self.ranges)))
    self.linmap = linmap(np.array([[0, 1]] * len(self.ranges)), self.ranges)

    # Initialize the population (random position and initial momentum),
    # all drawn uniformly from [0, 1).
    self.population = self.rng_seed.random((self.population_size, len(self.ranges)))
    self.momenta = self.rng_seed.random((self.population_size, len(self.ranges)))
    self.oracle_past = self.rng_seed.random((self.population_size, 1))
This gradient descent algorithm assumes that the optimization function 'f' is to be minimized, differentiable, and time independent.
$$\frac{\mathrm{d} f}{\mathrm{d} t} = \frac{\partial f}{\partial \vec{x}}\frac{\mathrm{d} \vec{x}}{\mathrm{d} t}$$
Where $\frac{\partial f}{\partial t}$ is assumed to be zero.
Arguments:
- ranges (list of pairs): Maxima and minima that each parameter can cover. Example: [(0, 10), (0, 10)] for two parameters ranging from 0 to 10.
- population_size (int): Number of individuals in the population.
- damping (float, optional): Damping factor to avoid oscillations. Defaults to 0.01.
- rng_seed (int, optional): Seed for the random number generator. Defaults to 0.
def
view(self, x):
def view(self, x):
    """Translate `x` from the internal domain to the codomain (parameter ranges)."""
    to_codomain = self.linmap
    return to_codomain(x)
Maps the input from the domain to the codomain.
def
view_g(self):
def view_g(self):
    """Translate the entire population from the domain to the codomain."""
    current = self.population
    return self.linmap(current)
Maps the input from the domain to the codomain.
def
inverse_view(self, x):
def inverse_view(self, x):
    """Translate `x` from the codomain back into the internal domain."""
    to_domain = self.invlinmap
    return to_domain(x)
Maps the input from the codomain to the domain.