Skip to content

Instantly share code, notes, and snippets.

View JulieProst's full-sized avatar

JulieProst

  • Sicara
  • Paris
View GitHub Profile
@JulieProst
JulieProst / import_data.py
Last active January 13, 2020 12:28
Keras-tuner_gist_1
from tensorflow.keras.datasets import cifar10
# Download the CIFAR-10 train/test splits (fetches the dataset on first use).
(x_train, y_train), (x_test, y_test) = cifar10.load_data()

# Divide by 255 so pixel values land in [0.0, 1.0] as float32
# (assumes 8-bit pixel data, the usual cifar10.load_data() format — TODO confirm).
x_train, x_test = (images.astype('float32') / 255. for images in (x_train, x_test))
@JulieProst
JulieProst / keras_model.py
Created January 13, 2020 12:27
Keras sequential CNN model
from tensorflow import keras
from tensorflow.keras.layers import (
Conv2D,
Dense,
Dropout,
Flatten,
MaxPooling2D
)
INPUT_SHAPE = (32, 32, 3)
from kerastuner import HyperModel
class CNNHyperModel(HyperModel):
    """Keras Tuner HyperModel that builds a CNN image classifier.

    NOTE(review): this gist capture is truncated — build() is cut off right
    after the Sequential() call, so the layer stack and compile step are not
    visible here. The scrape also dropped the original indentation; it has
    been restored conventionally without changing any code token.
    """

    def __init__(self, input_shape, num_classes):
        # input_shape: shape of one input image, e.g. (32, 32, 3) for CIFAR-10
        # num_classes: number of output classes for the final classifier layer
        self.input_shape = input_shape
        self.num_classes = num_classes

    def build(self, hp):
        # Called by the tuner with a HyperParameters instance `hp`;
        # expected to return a compiled Keras model.
        model = keras.Sequential()
        # ... remainder of the method is missing from this capture.
from kerastuner.tuners import RandomSearch

NUM_CLASSES = 10 # cifar10 number of classes
INPUT_SHAPE = (32, 32, 3) # cifar10 images input shape

# Instantiate the hypermodel defined in the previous gist snippet.
hypermodel = CNNHyperModel(input_shape=INPUT_SHAPE, num_classes=NUM_CLASSES)

# Random-search tuner: samples hyperparameter combinations at random,
# maximizing validation accuracy.
# NOTE(review): the call is truncated in this capture — the remaining
# RandomSearch arguments (e.g. max_trials, directory) are not visible.
tuner = RandomSearch(
    hypermodel,
    objective='val_accuracy',
from kerastuner.tuners import Hyperband

# Hyperband tuner: allocates training budget adaptively across trials,
# again maximizing validation accuracy.
# NOTE(review): the call is truncated in this capture, and
# HYPERBAND_MAX_EPOCHS, SEED and EXECUTION_PER_TRIAL are defined in a part
# of the original gist not visible here — verify against the full source.
tuner = Hyperband(
    hypermodel,
    max_epochs=HYPERBAND_MAX_EPOCHS,
    objective='val_accuracy',
    seed=SEED,
    executions_per_trial=EXECUTION_PER_TRIAL,
    directory='hyperband',
# Fragment: tunable Conv2D filter count — the tuner chooses 32 or 64
# filters, defaulting to 64.
filters=hp.Choice(
    'num_filters',
    values=[32, 64],
    default=64,
),

# Fragment: tunable Dense layer — unit count searched over 32..512 in
# steps of 32 (default 128), activation chosen by the tuner.
# NOTE(review): truncated in this capture — the candidate values for
# 'dense_activation' are not visible here.
Dense(
    units=hp.Int(
        'units',
        min_value=32,
        max_value=512,
        step=32,
        default=128
    ),
    activation=hp.Choice(
        'dense_activation',
# Fragment: compile step with a fixed Adam learning rate of 1e-3;
# sparse categorical cross-entropy matches integer (non-one-hot) labels.
model.compile(
    optimizer=keras.optimizers.Adam(1e-3),
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy']
)

# Fragment: tunable learning rate, sampled log-uniformly over
# [1e-5, 1e-2] with a default of 1e-3 — presumably fed to the optimizer
# in place of the fixed 1e-3 above; verify against the full gist.
hp.Float(
    'learning_rate',
    min_value=1e-5,
    max_value=1e-2,
    sampling='LOG',
    default=1e-3
)
# Print a summary of the completed hyperparameter search.
tuner.results_summary()

# Pull out the single best model found during the search, then measure its
# loss and accuracy on the held-out test set.
best_model, = tuner.get_best_models(num_models=1)
loss, accuracy = best_model.evaluate(x_test, y_test)