@cottrell
Last active March 27, 2020 20:02
WrappedTrackFunc error: repro script. The error goes away when tensorflow is imported inside the @ray.remote task instead of only at module level (see the commented-out import in somesubfun).
import time

import numpy as np
import pandas as pd
import ray
import ray.tune
import ray.tune.track
import tensorflow as tf
import tensorflow.keras

def ray_init(load_code_from_local=False):
    if not ray.is_initialized():
        print(f'load_code_from_local={load_code_from_local}')
        return ray.init(
            memory=2000 * 1024 * 1024,
            object_store_memory=200 * 1024 * 1024,
            driver_object_store_memory=100 * 1024 * 1024,
            load_code_from_local=load_code_from_local,
        )

def ray_bounce():
    # tear down the current Ray session, then reconnect
    ray.shutdown()
    return ray_init()

@ray.remote
def somesubfun(config):
    # import tensorflow as tf  # UNCOMMENT THIS TO SEE ERROR GO AWAY
    # do something with tensorflow
    opt = tf.keras.optimizers.Adam(learning_rate=0.1)
    time.sleep(1)
    return np.random.randn(1)

def somefun(config):
    # run ten copies of the remote task and average their results
    res = list()
    for i in range(10):
        res.append(somesubfun.remote(config))
    res = ray.get(res)
    res = np.mean(res)
    # log the averaged result through ray.tune.track
    ray.tune.track.init()
    ray.tune.track.log(something=res, test="asdf")
    return res

import hyperopt as ho


def test_ray_tuner():
    space = dict(
        l1=ho.hp.loguniform("l1", -2, 2),
        l2=ho.hp.loguniform("l2", -2, 2),
        concentration=ho.hp.loguniform("concentration", 0, 5),
    )
    from ray.tune.suggest.hyperopt import HyperOptSearch
    # "something" matches the key logged via ray.tune.track.log above
    search = HyperOptSearch(space, max_concurrent=10,
                            metric="something", mode="min")
    analysis = ray.tune.run(somefun, search_alg=search)
    return analysis

if __name__ == '__main__':
    ray_init()
    print(test_ray_tuner())
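
As the comment in somesubfun notes, the error goes away when tensorflow is imported inside the remote task itself. A minimal sketch of that workaround (same task as above, with the import duplicated inside the task body):

@ray.remote
def somesubfun(config):
    # importing tensorflow inside the task means each Ray worker performs the
    # import itself instead of relying on the module-level import
    import tensorflow as tf
    opt = tf.keras.optimizers.Adam(learning_rate=0.1)
    time.sleep(1)
    return np.random.randn(1)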