@gugarosa
Created June 18, 2019 13:35
A Simulated Annealing algorithm for function optimization.
from math import pi, sin

import numpy as np


def function(x):
    """Fitness function.

    Args:
        x (float): Input value for fitness function.

    Returns:
        The output value of the fitness function.

    """

    return 2 ** (-2 * (((x - 0.1) / 0.9) ** 2)) * (sin(5 * pi * x) ** 6)
def simulated_annealing(x, func, T=0.01, beta=0.9, lower_bound=0.0, upper_bound=1.0, n_iterations=100):
    """Performs the Simulated Annealing optimization algorithm.

    Args:
        x (float): Initial position.
        func (callable): Fitness function to be maximized.
        T (float): System's temperature.
        beta (float): Temperature decay rate.
        lower_bound (float): Minimum value for position.
        upper_bound (float): Maximum value for position.
        n_iterations (int): Number of iterations.

    Returns:
        The best position value after performing optimization.

    """

    # Runs the optimization loop
    for t in range(n_iterations):
        # Logs the current iteration
        print(f'Iteration {t+1}/{n_iterations}')

        # Samples a gaussian noise
        noise = np.random.normal(0, 0.1)

        # Perturbs the current position
        x_new = x + noise

        # Generates a uniform random number
        r = np.random.uniform(0, 1)

        # Checks if the new fitness is better than the current fitness
        if func(x_new) > func(x):
            # If yes, accepts the new position
            x = x_new

        # Otherwise, checks if the random number is smaller than the acceptance probability
        elif r < np.exp((func(x_new) - func(x)) / T):
            # If yes, accepts the new (worse) position
            x = x_new

        # Clips the position to make sure it is inside the bounds
        x = np.clip(x, lower_bound, upper_bound)

        # Updates the temperature
        T *= beta

        # Logs the iteration's fitness and position
        print(f'Fitness: {func(x)} | Position: {x}')

    return x
if __name__ == "__main__":
    # Initial solution
    x = np.random.uniform(0, 1)

    # Simulated Annealing
    x_best = simulated_annealing(x, function, T=0.5, beta=0.9, n_iterations=100)
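Below is a minimal usage sketch, assuming the gist's `function` and `simulated_annealing` definitions are in scope; the restart loop, the seeding, and the `n_restarts` name are illustrative additions rather than part of the gist. Since the fitness reaches its maximum value of 1.0 at x = 0.1, the best restart should end up near that point.

# Illustrative driver (assumes function and simulated_annealing defined above):
# runs several independent restarts and keeps the best result.
n_restarts = 10
best_x, best_fit = None, -np.inf

for seed in range(n_restarts):
    # Fixes the seed so each restart is reproducible
    np.random.seed(seed)

    # Draws a fresh initial solution and optimizes it
    x0 = np.random.uniform(0, 1)
    x_opt = simulated_annealing(x0, function, T=0.5, beta=0.9, n_iterations=100)

    # Keeps track of the best restart so far
    if function(x_opt) > best_fit:
        best_x, best_fit = x_opt, function(x_opt)

print(f'Best position: {best_x} | Best fitness: {best_fit}')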