Emil Wallner (emilwallner)
# Large learning rate
network = regression(network,
                     optimizer='adam',
                     loss='categorical_crossentropy',
                     learning_rate=0.01)
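
# Sketch (not part of the gist above): regression() only attaches the optimizer, loss
# and learning rate to the graph; training requires wrapping the graph in a model.
# The name `model` is the one the fit() call in the next snippet assumes.
model = tflearn.DNN(network)
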
# Large batch size
model.fit(X, Y,
          n_epoch=50,
          shuffle=True,
          validation_set=(X_test, Y_test),
          show_metric=True,
          batch_size=2000,
          run_id='cifar_large_batch_size')
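
# Sketch (assumed context, not shown in the gist): the X, Y, X_test and Y_test arrays
# used above are the CIFAR-10 images and one-hot labels, loaded with TFLearn's helpers.
from tflearn.datasets import cifar10
from tflearn.data_utils import shuffle, to_categorical

(X, Y), (X_test, Y_test) = cifar10.load_data()
X, Y = shuffle(X, Y)
Y = to_categorical(Y, 10)
Y_test = to_categorical(Y_test, 10)
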
# L2 regularization (the img_prep and img_aug objects are sketched after this network)
network = input_data(shape=[None, 32, 32, 3],
                     data_preprocessing=img_prep,
                     data_augmentation=img_aug)
network = conv_2d(network, 32, 3, activation='relu', regularizer='L2')
network = max_pool_2d(network, 2)
network = conv_2d(network, 64, 3, activation='relu', regularizer='L2')
network = conv_2d(network, 64, 3, activation='relu', regularizer='L2')
network = max_pool_2d(network, 2)
network = fully_connected(network, 512, activation='relu', regularizer='L2')
network = fully_connected(network, 10, activation='softmax')
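
# Sketch (settings are assumptions, not taken from the gist): the img_prep and img_aug
# objects passed to input_data() are TFLearn's real-time preprocessing and augmentation helpers.
from tflearn.data_preprocessing import ImagePreprocessing
from tflearn.data_augmentation import ImageAugmentation

img_prep = ImagePreprocessing()
img_prep.add_featurewise_zero_center()
img_prep.add_featurewise_stdnorm()

img_aug = ImageAugmentation()
img_aug.add_random_flip_leftright()
img_aug.add_random_rotation(max_angle=25.)
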
network = input_data(shape=[None, 32, 32, 3],
                     data_preprocessing=img_prep,
                     data_augmentation=img_aug)
network = conv_2d(network, 32, 3, activation='relu')
network = max_pool_2d(network, 2)
network = conv_2d(network, 64, 3, activation='relu')
network = conv_2d(network, 64, 3, activation='relu')
network = max_pool_2d(network, 2)
network = fully_connected(network, 512, activation='relu')
# The dropout layer (keep probability 0.5) and the softmax output complete this network
network = dropout(network, 0.5)
network = fully_connected(network, 10, activation='softmax')

# Convolutional network building
network = input_data(shape=[None, 32, 32, 3],
                     data_preprocessing=img_prep,
                     data_augmentation=img_aug)
# One set of layers
network = conv_2d(network, 32, 3, activation='relu')
network = max_pool_2d(network, 2)
network = fully_connected(network, 512, activation='relu')
network = fully_connected(network, 10, activation='softmax')
network = regression(network, optimizer='adam',
                     loss='categorical_crossentropy',
                     learning_rate=0.001)  # loss and learning rate are assumed; the original call is truncated here
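
# Sketch (hyperparameters and run_id are placeholders): the imports and training call that
# would make the network snippets above runnable; in a real script the imports sit at the top.
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.estimator import regression

model = tflearn.DNN(network, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=50, shuffle=True, validation_set=(X_test, Y_test),
          show_metric=True, batch_size=96, run_id='cifar_basic_cnn')
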
# 1. Import library of functions
import tflearn
# 2. Logical OR operator / the data
OR = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]
Y_truth = [[0.], [1.], [1.], [1.]]
# 3. Building our neural network/layers of functions
neural_net = tflearn.input_data(shape=[None, 2])
neural_net = tflearn.fully_connected(neural_net, 1, activation='sigmoid')
neural_net = tflearn.regression(neural_net, optimizer='sgd', learning_rate=2, loss='mean_square')
# 4. Train the neural network / Epochs
model = tflearn.DNN(neural_net)
model.fit(OR, Y_truth, n_epoch=2000, snapshot_epoch=False)  # epoch count is illustrative
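
# 5. Sketch (not in the preview above): check the trained model's predictions.
# Outputs near 1.0 for every pair except [0., 0.] mean the network has learned OR.
print("0 or 0:", model.predict([[0., 0.]]))
print("0 or 1:", model.predict([[0., 1.]]))
print("1 or 0:", model.predict([[1., 0.]]))
print("1 or 1:", model.predict([[1., 1.]]))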