Skip to content

Instantly share code, notes, and snippets.

@phabee
Created December 9, 2023 06:41
Show Gist options
  • Save phabee/75817e598f75e505334f1f4749f6db89 to your computer and use it in GitHub Desktop.
Save phabee/75817e598f75e505334f1f4749f6db89 to your computer and use it in GitHub Desktop.
import matplotlib.pyplot as plt
def remove_duplicate_candidates(N):
    '''
    Deduplicate a list of candidate solutions (binary lists) so that each
    element list is contained exactly once.
    :param N: list of candidate solutions
    :return: revised list containing no duplicate candidates
    '''
    # Lists are unhashable, so round-trip through tuples for set-based dedup.
    return [list(candidate) for candidate in {tuple(candidate) for candidate in N}]
def nb_knapsack(S):
    """
    Build the neighborhood of the starting solution S.
    NB: all neighbors arise from S by either
    a) adding an object (flipping a 0 to 1)
    b) removing an object (flipping a 1 to 0)
    c) removing a contained object AND adding a non-contained object (swap)
    :param S: starting solution (list of n binary elements)
    :return: list of lists containing the unique neighboring solutions
    """
    neighbors = []
    # a) + b): flip every single bit once
    for pos, bit in enumerate(S):
        flipped = list(S)
        flipped[pos] = 1 - bit
        neighbors.append(flipped)
    # c): pair every non-contained object with every contained one
    zeros = [idx for idx, bit in enumerate(S) if bit == 0]
    ones = [idx for idx, bit in enumerate(S) if bit == 1]
    for z in zeros:
        for o in ones:
            # create the neighbor where the 0 becomes 1 and the 1 becomes 0
            swapped = list(S)
            swapped[z], swapped[o] = 1, 0
            neighbors.append(swapped)
    # drop duplicate candidates (set of tuples, same dedup as the module helper)
    return [list(t) for t in {tuple(c) for c in neighbors}]
def eval_func(S, values):
    '''
    Calculate the scalar (dot) product of S and values.
    :param S: a given solution as binary vector
    :param values: the values vector to be multiplied with the binary vector
    :return: the scalar product
    '''
    # Index-based so a too-short values vector still raises, as before.
    return sum(S[i] * values[i] for i in range(len(S)))
def local_search_knapsack(S, weights, values, capacity):
    '''
    Perform one local-search iteration in the neighborhood of the solution S.
    :param S: start solution
    :param weights: weights of the objects of the knapsack problem
    :param values: values assigned to the knapsack items
    :param capacity: capacity of the knapsack
    :return: the best solution candidate found in this local-search iteration
    '''
    incumbent = S
    incumbent_value = eval_func(S, values)
    # determine the neighborhood of S
    neighborhood = nb_knapsack(S)
    print("Neighborhood (", S, ") is: ", neighborhood)
    for cand in neighborhood:
        # skip candidates that exceed the knapsack capacity (infeasible)
        if eval_func(cand, weights) > capacity:
            continue
        cand_value = eval_func(cand, values)
        # keep the best feasible candidate seen so far
        if cand_value > incumbent_value:
            incumbent, incumbent_value = cand, cand_value
    print("Best solution:", incumbent, ", val: ", incumbent_value)
    return incumbent
def meta_heuristic(S, weights, values, capacity):
    '''
    Apply iterated local search based on the starting solution S.
    Calls local_search_knapsack repeatedly, taking the best solution of each
    round as the next starting solution, until no improvement occurs.
    :param S: starting solution
    :param weights: weights of the objects in the knapsack problem
    :param values: values assigned to the knapsack items
    :param capacity: capacity of the knapsack
    :return: tuple (local optimum, its objective value, objective-value history)
    '''
    current = S
    history = [eval_func(S, values)]
    while True:
        improved = local_search_knapsack(current, weights, values, capacity)
        objective = eval_func(improved, values)
        history.append(objective)
        # unchanged solution means a local optimum was reached
        if improved == current:
            break
        current = improved
    return improved, objective, history
def plot(obj_values_list, label_list):
    """
    Plot one objective-value trajectory per run against the iteration number.

    Shorter trajectories are padded with NaN so that every series shares the
    same x-axis; matplotlib simply omits the NaN points.
    :param obj_values_list: list of lists of objective values (one per run)
    :param label_list: legend labels, parallel to obj_values_list
    """
    # Find the maximum length over all sub-lists
    max_length = max(len(obj_values) for obj_values in obj_values_list)
    # BUG FIX: the original padded with np.nan, but numpy is never imported in
    # this file, so any runs of differing length raised a NameError.
    # float('nan') is the stdlib equivalent and needs no extra dependency.
    nan = float('nan')
    obj_values_list_padded = [
        obj_values + [nan] * (max_length - len(obj_values))
        for obj_values in obj_values_list
    ]
    # Iteration numbers 1, 2, 3, ..., n
    iterations = list(range(1, max_length + 1))
    # One curve per (padded) trajectory
    for i, obj_values in enumerate(obj_values_list_padded):
        plt.plot(iterations, obj_values, marker='o', linestyle='-', label=label_list[i])
    plt.title('Optimization Process')
    plt.xlabel('Iteration Number')
    plt.ylabel('Objective Function Value')
    # Restrict x-axis ticks to whole iteration numbers
    plt.xticks(iterations)
    plt.legend()
    plt.show()
# Main program
# Knapsack problem definition
weights = [4, 12, 1, 2, 1]
values = [10, 4, 2, 2, 1]
capacity = 15
label_list = ["[0,1,1,0,0]", "[0,0,0,0,0]", "[1,1,0,0,0]", "[0,0,0,1,1]"]
# Run the heuristic once per starting solution and record each trajectory
start_solutions = [
    [0, 1, 1, 0, 0],
    [0, 0, 0, 0, 0],
    [1, 1, 0, 0, 0],
    [0, 0, 0, 1, 1],
]
history = []
for S in start_solutions:
    l_opt, l_val, l_val_history = meta_heuristic(S, weights, values, capacity)
    history.append(l_val_history)
plot(history, label_list)
print("Local Optimum: ", l_opt, ", Obj. val:", l_val)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment