Created
August 10, 2020 08:19
-
-
Save neelriyer/f0403bd1079bdae77778a67ff774e898 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from itertools import product | |
from tqdm.notebook import tqdm | |
def get_learner(emb_szs=emb_szs, layers=None, ps=None, emb_drop=0.08):
    """Build a fastai tabular learner for the module-level `data` bunch.

    Parameters
    ----------
    emb_szs : embedding sizes; default binds the module-level `emb_szs`
        at definition time (NOTE(review): intentional? confirm).
    layers : list of hidden-layer widths; defaults to [1000, 500].
    ps : list of per-layer dropout probabilities; defaults to [0.02, 0.04].
    emb_drop : embedding dropout probability.

    Returns the `tabular_learner` configured with the module-level
    `y_range` target range and `exp_rmspe` metric.
    """
    # Avoid mutable default arguments: bind the list defaults per call
    # so no caller can accidentally mutate a shared default.
    if layers is None:
        layers = [1000, 500]
    if ps is None:
        ps = [0.02, 0.04]
    return tabular_learner(data,
                           layers=layers,
                           ps=ps,
                           emb_drop=emb_drop,
                           y_range=y_range,
                           emb_szs=emb_szs,
                           metrics=exp_rmspe)
# --- LR-finder sweep over weight-decay values ---------------------------
# Accumulators: one entry per hyper-parameter combination tried.
lrs = []                      # learning-rate schedule recorded by lr_find per run
losses = []                   # corresponding loss curve per run
wds = []                      # weight decay used for each run (kept parallel to lrs/losses)
parameter_combinations = []   # full combination tried, as a list, per run

iter_count = 600  # anything over 300 seems to work well.

# Architecture hyper-parameters held fixed across the sweep.
layers = [1000, 500]
ps = [0.002, 0.02]
emb_drop = 0.04

# Search space: only weight decay varies here, but product() keeps the
# loop generic so more parameters can be added to `params` later.
params = {
    'wd': list(np.linspace(0, 0.6, 7)),
}

for combo in tqdm(list(product(*params.values()))):
    curr_wd = combo[0]
    # fixed: original print(...) was missing its closing parenthesis (SyntaxError)
    print("curr_wd = {}".format(curr_wd))
    learner = get_learner(emb_szs=emb_szs, layers=layers, ps=ps, emb_drop=emb_drop)
    learner.lr_find(wd=curr_wd, num_it=iter_count)
    lrs.append(learner.recorder.lrs)
    losses.append(learner.recorder.losses)
    wds.append(curr_wd)  # was declared but never filled in the original
    parameter_combinations.append([curr_wd])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment