Keras Tuner to Optimize ESN Hyperparameters
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.callbacks import EarlyStopping
# ESN (Echo State Network) layer comes from TensorFlow Addons.
from tensorflow_addons.layers import ESN
import keras_tuner
# Load the dataset indexed by timestamp; the day-of-week dummy columns start at column 3.
df = pd.read_csv('df_NE_seno_cosseno.csv',
                 index_col='DataHora')
lista_dias_semana = list(df.columns[3:])

# Sliding-window sizes: 48 past samples as input, 24 future samples as target.
amostras_antes = 48
amostras_depois = 24

x = []
y = []
# Build input/target windows, stepping 24 samples (one day) at a time.
for i in range(0, len(df), 24):
    final_x = i + amostras_antes
    final_y = final_x + amostras_depois
    if final_y > len(df):
        break
    previsor_carga = df['Carga'][i:final_x]
    previsor_dia = df.loc[df.index[final_x], lista_dias_semana]
    previsor_sen = pd.Series(df['Seno'].loc[df.index[final_x]])
    previsor_cos = pd.Series(df['Cosseno'].loc[df.index[final_x]])
    previsor_concat = pd.concat([previsor_carga,
                                 previsor_dia,
                                 previsor_sen,
                                 previsor_cos]).values
    x.append(previsor_concat)
    y.append(df['Carga'][final_x:final_y])

x = np.array(x)
y = np.array(y)
# Keep every window up to (and including) the one whose target ends at 31/12/2021 23:00 for training.
lin, col = np.where(np.isclose(y, df['Carga'].loc[df.index == '31/12/2021 23:00']))
x_train = x[0:lin[np.argmax(col)] + 1]
y_train = y[0:lin[np.argmax(col)] + 1]

# Reshape to (samples, timesteps, features), as expected by the recurrent ESN layer.
max_batch_size = x_train.shape[0]
timesteps = x_train.shape[1]
input_dim = 1
x_train = x_train.reshape(max_batch_size, timesteps, input_dim)

val_size = 0.2
def criar_modelo(hp):
    """Build an ESN model whose hyperparameters are sampled by Keras Tuner."""
    model = keras.Sequential(name='ESN_NE')
    model.add(layers.InputLayer((timesteps, input_dim), name='camada_entrada'))
    # Echo State Network reservoir: tune size, connectivity, spectral radius and activation.
    model.add(ESN(units=hp.Int('units', min_value=100, max_value=500, step=50),
                  connectivity=hp.Float('connectivity', min_value=0.1, max_value=0.4, step=0.1),
                  spectral_radius=hp.Float('spectral_radius', min_value=0.5, max_value=0.9, step=0.1),
                  activation=hp.Choice('activation_esn', ['relu', 'tanh', 'sigmoid']),
                  name='camada_esn'))
    # Optional dropout after the reservoir.
    if hp.Boolean('dropout'):
        model.add(layers.Dropout(rate=hp.Float('rate', min_value=0.2, max_value=0.5, step=0.1)))
    # One or two dense readout layers; both share the same activation choice.
    for k in range(hp.Int('num_layers', 1, 2)):
        model.add(layers.Dense(units=hp.Int(f'units_{k}', min_value=24, max_value=120, step=24),
                               activation=hp.Choice('activation_dense', ['relu', 'tanh', 'sigmoid']),
                               name=f'{k}_dense'))
    # Output layer: one unit per forecast step.
    model.add(layers.Dense(units=amostras_depois,
                           activation=hp.Choice('activation_out', ['relu', 'tanh', 'sigmoid']),
                           name='camada_saida'))
    model.compile(optimizer='adam',
                  loss='mean_squared_error')
    return model
# Bayesian optimization over the search space defined in criar_modelo.
tuner = keras_tuner.BayesianOptimization(
    hypermodel=criar_modelo,
    objective='val_loss',
    max_trials=40,
    executions_per_trial=5,
    overwrite=True,
    directory='tune_hp',
    project_name='tuning',
)
# search_space_summary() prints the summary itself and returns None.
tuner.search_space_summary()

earlystop = EarlyStopping(monitor='val_loss',
                          patience=500,
                          verbose=1,
                          mode='min',
                          restore_best_weights=True)

tuner.search(x_train, y_train, epochs=1500, batch_size=32, verbose=2,
             callbacks=[earlystop],
             validation_split=val_size, shuffle=True)
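
# After the search, the best configuration can be retrieved with the standard
# Keras Tuner API and used to rebuild and retrain a final model. A minimal
# sketch, reusing the training settings above; the saved file name is hypothetical.
tuner.results_summary()
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]
best_model = tuner.hypermodel.build(best_hp)
best_model.fit(x_train, y_train, epochs=1500, batch_size=32, verbose=2,
               callbacks=[earlystop],
               validation_split=val_size, shuffle=True)
best_model.save('esn_ne_best.h5')  # hypothetical output path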