@sfujiwara
Last active December 9, 2018 17:22
import tensorflow as tf
import optuna
import sklearn.datasets
from sklearn.model_selection import train_test_split


def create_input_fn():
    # Load the Iris data set and split it 50/50 into training and evaluation sets.
    iris = sklearn.datasets.load_iris()
    x, y = iris.data, iris.target
    x_train, x_eval, y_train, y_eval = train_test_split(x, y, test_size=0.5, random_state=42)

    def _train_input_fn():
        # Shuffle and repeat the training data, feeding it in batches of 16.
        dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
        dataset = dataset.shuffle(128).repeat().batch(16)
        iterator = dataset.make_one_shot_iterator()
        features, labels = iterator.get_next()
        return {"x": features}, labels

    def _eval_input_fn():
        # Feed the evaluation data through once, in batches of 16.
        dataset = tf.data.Dataset.from_tensor_slices((x_eval, y_eval))
        dataset = dataset.batch(16)
        iterator = dataset.make_one_shot_iterator()
        features, labels = iterator.get_next()
        return {"x": features}, labels

    return _train_input_fn, _eval_input_fn


def objective(trial):
    # Optuna objective: train a classifier with the suggested learning rate
    # and return a value to be minimized.
    train_input_fn, eval_input_fn = create_input_fn()
    learning_rate = trial.suggest_loguniform("learning_rate", 1e-5, 1e-1)
    clf = tf.estimator.DNNClassifier(
        feature_columns=[tf.feature_column.numeric_column(key="x", shape=[4])],
        n_classes=3,
        hidden_units=[],  # no hidden layers, i.e. a linear classifier
        optimizer=tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    )
    clf.train(input_fn=train_input_fn, steps=100)
    result = clf.evaluate(input_fn=eval_input_fn, steps=100)
    accuracy = result["accuracy"]
    # Optuna minimizes the objective by default, so return the error rate.
    return 1.0 - accuracy


if __name__ == "__main__":
    study = optuna.create_study()
    study.optimize(objective, n_trials=10)
    print(study.best_trial)
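
Once the study has finished, the tuned hyperparameters can also be read back directly. A minimal sketch, assuming the standard Optuna Study attributes best_params and best_value (only best_trial is printed in the gist above):

    study = optuna.create_study()
    study.optimize(objective, n_trials=10)
    print(study.best_params)  # e.g. {"learning_rate": ...}
    print(study.best_value)   # smallest observed 1.0 - accuracy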