Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save machinatoonist/7c95c31f2ff427f434159af9a143cda0 to your computer and use it in GitHub Desktop.
Save machinatoonist/7c95c31f2ff427f434159af9a143cda0 to your computer and use it in GitHub Desktop.
Error when attempting to specify vocabulary size in fastai text_classifier_learner with AWD_LTSM
def train(arch, train_df=train_df, accum=1, epochs=12):
dls = TextDataLoaders.from_df(train_df,text_col='desc',label_col='label',valid_pct=0.2,bs=64//accum)
cbs = GradientAccumulation(64) if accum else []
# learn = text_classifier_learner(dls, arch=AWD_LSTM, drop_mult=0.5, metrics=[accuracy], cbs=cbs).to_fp16()
learn = text_classifier_learner(dls, arch=AWD_LSTM(vocab_sz=200, emb_sz=20, n_hid=10, n_layers=2, pad_token=1,
hidden_p=0.2, embed_p=0.02, input_p=0.1, weight_p=0.2),
drop_mult=0.5, metrics=[accuracy], cbs=cbs).to_fp16()
learn.fine_tune(epochs, 0.01)
return learn
l = train(arch=AWD_LSTM, accum=2, epochs=4)
KeyError Traceback (most recent call last)
/tmp/ipykernel_1867/573006035.py in <module>
10 return learn
11
---> 12 l = train(arch=AWD_LSTM, accum=2, epochs=4)
/tmp/ipykernel_1867/573006035.py in train(arch, train_df, accum, epochs)
5 learn = text_classifier_learner(dls, arch=AWD_LSTM(vocab_sz=200, emb_sz=20, n_hid=10, n_layers=2, pad_token=1,
6 hidden_p=0.2, embed_p=0.02, input_p=0.1, weight_p=0.2),
----> 7 drop_mult=0.5, metrics=[accuracy], cbs=cbs).to_fp16()
8 # learn = text_classifier_learner(dls, arch=LMModel6, drop_mult=0.5, metrics=[accuracy], cbs=cbs).to_fp16()
9 learn.fine_tune(epochs, 0.01)
~/.local/lib/python3.7/site-packages/fastai/text/learner.py in text_classifier_learner(dls, arch, seq_len, config, backwards, pretrained, drop_mult, n_out, lin_ftrs, ps, max_len, y_range, **kwargs)
260 assert n_out, "`n_out` is not defined, and could not be inferred from data, set `dls.c` or pass `n_out`"
261 model = get_text_classifier(arch, len(vocab), n_out, seq_len=seq_len, config=config, y_range=y_range,
--> 262 drop_mult=drop_mult, lin_ftrs=lin_ftrs, ps=ps, max_len=max_len)
263 meta = _model_meta[arch]
264 learn = TextLearner(dls, model, splitter=meta['split_clas'], **kwargs)
~/.local/lib/python3.7/site-packages/fastai/text/models/core.py in get_text_classifier(arch, vocab_sz, n_class, seq_len, config, drop_mult, lin_ftrs, ps, pad_idx, max_len, y_range)
159 ):
160 "Create a text classifier from `arch` and its `config`, maybe `pretrained`"
--> 161 meta = _model_meta[arch]
162 config = ifnone(config, meta['config_clas']).copy()
163 for k in config.keys():
KeyError: AWD_LSTM(
(encoder): Embedding(200, 20, padding_idx=1)
(encoder_dp): EmbeddingDropout(
(emb): Embedding(200, 20, padding_idx=1)
)
(rnns): ModuleList(
(0): WeightDropout(
(module): LSTM(20, 10, batch_first=True)
)
(1): WeightDropout(
(module): LSTM(10, 20, batch_first=True)
)
)
(input_dp): RNNDropout()
(hidden_dps): ModuleList(
(0): RNNDropout()
(1): RNNDropout()
)
)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment