Skip to content

Instantly share code, notes, and snippets.

@schlerp
Created September 13, 2017 06:33
Show Gist options
  • Save schlerp/bef91bd29c3f6afa860d5966f4dfa8e3 to your computer and use it in GitHub Desktop.
Save schlerp/bef91bd29c3f6afa860d5966f4dfa8e3 to your computer and use it in GitHub Desktop.
'''
ga_tfl.py
applying genetic algorithms to TFLearn neural networks
'''
import random

import tflearn
import tflearn.datasets.mnist
from tflearn.data_utils import *
# load data: MNIST digits with one-hot encoded labels
# (needs `import tflearn.datasets.mnist`; `import tflearn` alone does not
# pull in the datasets subpackage)
X, Y, Xval, Yval = tflearn.datasets.mnist.load_data(one_hot=True)
# reshape flat 784-vectors into (batch, height, width, channels) images
X = X.reshape([-1, 28, 28, 1])
Xval = Xval.reshape([-1, 28, 28, 1])

in_shape = [None, 28, 28, 1]  # network input shape (None = variable batch)
out_nodes = 10                # one output node per digit class
pop_size = 10                 # networks per GA generation

# gene order inside an entity's gene list; each name indexes into `hps`
hps_map = ('conv_layers', 'conv_filters', 'conv_activations', 'lin_layers',
           'lin_widths', 'lin_activations', 'learn_rates', 'optimizers')

# hyperparameter search space: the candidate values for each gene
hps = {'conv_layers': list(range(1, 3)),
       'conv_filters': list(range(8, 128, 8)),
       'conv_activations': ['linear', 'relu', 'elu', 'tanh', 'sigmoid'],
       'lin_layers': list(range(1, 5)),
       'lin_widths': list(range(32, 256, 32)),
       'lin_activations': ['linear', 'relu', 'elu', 'tanh', 'sigmoid'],
       'learn_rates': [0.000001, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1],
       'optimizers': ['sgd', 'rmsprop', 'adam', 'momentum', 'adagrad'],
       }
def breed(parent1, parent2):
    """Produce a child gene list by one-point crossover of two parents.

    The crossover point is drawn uniformly from the interior of the gene
    list, and the resulting child is always passed through one mutation.
    """
    cut = random.randint(1, len(parent2) - 2)
    head, tail = parent1[:cut], parent2[cut:]
    return mutate(head + tail)
def mutate(child):
    """Randomly reassign one gene of *child* in place and return it.

    Picks a gene slot uniformly, then replaces its value with a fresh
    draw from the matching pool in the module-level `hps` table.
    """
    idx = random.randint(0, len(hps_map) - 1)
    child[idx] = random.choice(hps[hps_map[idx]])
    return child
def make_model(conv_layer, conv_filter, conv_activation, lin_layer,
               lin_width, lin_activation, learn_rate, optimizer):
    """Build a TFLearn graph from one entity's genes.

    Each gene supplies one architecture/training choice: number of conv
    blocks, filters per conv, conv activation, number of dense layers,
    dense width, dense activation, learning rate, and optimizer name.
    Returns the regression-wrapped output layer, ready for tflearn.DNN.
    """
    # input layer
    network = tflearn.input_data(shape=in_shape, name='input')
    # conv blocks
    # NOTE(review): source indentation was lost in transit; max_pool_2d is
    # assumed to sit inside the conv loop (one 2x2 pool per conv layer) —
    # confirm against the original gist.
    for _ in range(conv_layer):
        network = tflearn.conv_2d(network, conv_filter, 3,
                                  activation=conv_activation, regularizer="L2")
        network = tflearn.max_pool_2d(network, 2)
    # fully-connected layers
    for _ in range(lin_layer):
        network = tflearn.fully_connected(network, lin_width,
                                          activation=lin_activation)
    # softmax output layer, one node per class
    network = tflearn.fully_connected(network, out_nodes, activation='softmax')
    network = tflearn.regression(network, optimizer=optimizer,
                                 learning_rate=learn_rate,
                                 loss='categorical_crossentropy', name='target')
    return network
def init_pop():
    """Create the initial population: `pop_size` randomly-built networks."""
    entities = []
    for idx in range(pop_size):
        print('network {}'.format(idx))
        # draw one random value for every gene slot, in hps_map order
        genes = [random.choice(hps[name]) for name in hps_map]
        for pair in zip(hps_map, genes):
            print(pair)
        entities.append(make_model(*genes))
    return entities
def main():
    """Train every network in the population for one epoch per generation.

    Runs until interrupted with Ctrl-C.  Fixes vs. the original: the
    generation counter is now actually incremented each pass (it used to
    log "Training Generation 1" forever), and per-network evaluation
    scores are collected instead of being thrown away.

    TODO(review): selection and breeding (breed/mutate) are still not
    wired in — `pop` holds built tflearn graphs rather than gene lists,
    so every generation re-trains the same initial population.
    """
    try:
        pop = init_pop()
        generation = 1
        while True:
            print('Training Generation {}'.format(generation))
            scored = []  # (score, network) pairs for this generation
            for net in pop:
                model = tflearn.DNN(net, tensorboard_verbose=0)
                model.fit(X, Y, n_epoch=1, snapshot_step=100,
                          show_metric=False, run_id='ga_convnet_mnist')
                score = model.evaluate(Xval, Yval)
                print(score)
                scored.append((score, net))
            generation += 1
    except KeyboardInterrupt:
        # Ctrl-C ends the evolutionary run cleanly.
        pass


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment