

@julesangebault
Created August 19, 2022 10:48
Trying to optimize the Neural Network architecture using Hyperas
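
A note on the double-brace syntax used below: Hyperas treats {{choice(...)}} and {{uniform(...)}} as templates rather than ordinary Python. optim.minimize rewrites the source of data() and the model function into a plain hyperopt script, replacing each template with a search-space node, conceptually along these lines (an illustrative sketch, not the exact code Hyperas generates):

    from hyperopt import hp
    # a call such as Dense({{choice([32, 64, 96, 128])}}) becomes, roughly,
    space = {'Dense': hp.choice('Dense', [32, 64, 96, 128])}
    # ...with Dense(space['Dense']) substituted into the generated model function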
#%% Hyperas Optimisation
from hyperas import optim
from hyperas.distributions import choice, uniform
from hyperopt import Trials, STATUS_OK, tpe
from Reasearch_project_Code import *
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense, Dropout
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
def data():
    # Hyperas re-runs this function from a generated script, so everything it
    # needs (paths, loader objects) must be defined locally.
    dpath = "C:\\Users\\jules\\OneDrive - University of Southampton\\Documents\\Research Project\\5- Codes"
    R = RP(dpath)
    V = R.Verification("", "Data1")
    V.load_data()
    X = V.labels_unscaled
    y = V.samples_unscaled
    y_A = y[:, :2]   # first two outputs (not modelled here)
    y_B = y[:, 2:]   # remaining outputs, the targets of this network
    # Scale inputs and targets to [0, 1]
    scalerInputs = MinMaxScaler((0, 1))
    X_scaled = scalerInputs.fit_transform(X)
    scalerOutputs = MinMaxScaler((0, 1))
    y_B_scaled = scalerOutputs.fit_transform(y_B)
    X_train, X_test, Y_train, Y_test = train_test_split(
        X_scaled, y_B_scaled, test_size=0.2, random_state=777)
    return X_train, X_test, Y_train, Y_test
def get_model(X_train, X_test, Y_train, Y_test):
    # Dimensions must come from the function arguments: X and y_B from
    # data() are not in scope here.
    n_inputs, n_outputs = X_train.shape[1], Y_train.shape[1]
    # Search space:
    #   nodes per layer        32, 64, 96, 128
    #   activation function    relu, sigmoid, tanh, linear
    #   epochs                 10, 50, 100, 200, 500
    #   batch size             16, 32, 64
    #   learning rate          1e-3, 1e-2, 1e-1
    #   optimizer              adam, sgd
    #   hidden layers          two, three, four
    model = Sequential()
    model.add(Dense({{choice([32, 64, 96, 128])}}, input_dim=n_inputs,
                    activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # input layer
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense({{choice([32, 64, 96, 128])}},
                    activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # hidden layer 1
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense({{choice([32, 64, 96, 128])}},
                    activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # hidden layer 2
    model.add(Dropout({{uniform(0, 1)}}))
    # Optionally deepen the network; the comparison strings must match the
    # choice values exactly, otherwise a branch can never be taken.
    HiddenLayers = {{choice(["two", "three", "four"])}}
    if HiddenLayers == "three":
        model.add(Dense({{choice([32, 64, 96, 128])}},
                        activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # hidden layer 3
        model.add(Dropout({{uniform(0, 1)}}))
    elif HiddenLayers == "four":
        model.add(Dense({{choice([32, 64, 96, 128])}},
                        activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # hidden layer 3
        model.add(Dropout({{uniform(0, 1)}}))
        model.add(Dense({{choice([32, 64, 96, 128])}},
                        activation={{choice(["relu", "sigmoid", "tanh", "linear"])}}))  # hidden layer 4
        model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense(n_outputs))  # output layer, linear activation for regression
    # Optimizer choice (named 'opt' so it does not shadow the hyperas.optim module)
    adam = optimizers.Adam(lr={{choice([1e-3, 1e-2, 1e-1])}})
    sgd = optimizers.SGD(lr={{choice([1e-3, 1e-2, 1e-1])}})
    choiceval = {{choice(["adam", "sgd"])}}
    if choiceval == 'adam':
        opt = adam
    else:
        opt = sgd
    model.compile(loss='mae', optimizer=opt)
    model.fit(X_train, Y_train,
              epochs={{choice([10, 50, 100, 200, 500])}},
              batch_size={{choice([16, 32, 64])}},
              validation_data=(X_test, Y_test))
    mae = model.evaluate(X_test, Y_test, verbose=0)
    # hyperopt expects a 'status' entry; returning the model as well lets
    # optim.minimize hand back the best fitted network.
    return {'loss': mae, 'status': STATUS_OK, 'model': model}
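# optim.minimize re-parses the source of data() and get_model() above, turns
# every {{...}} template into a node of a hyperopt search space, and runs TPE
# for max_evals trials; because get_model returns the fitted network under the
# 'model' key, the best one is handed back alongside the winning parameters.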
X_train, X_test, Y_train, Y_test = data()
best_run, best_model = optim.minimize(model=get_model,
                                      data=data,
                                      algo=tpe.suggest,
                                      max_evals=30,
                                      trials=Trials())
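
A minimal sketch of how the results are typically used afterwards (not part of the original run; assumes the script above completed, with X_test/Y_test as returned by data()):

    print("Best hyperparameter choices:", best_run)
    print("Test MAE of the best model:",
          best_model.evaluate(X_test, Y_test, verbose=0))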