$ python news_classifier.py --max_epochs 1 --num_samples 100
2022-07-19 09:39:20.830460: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2022-07-19 09:39:20.830488: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.
Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertModel: ['cls.predictions.transform.dense.weight', 'cls.predictions.decoder.weight', 'cls.predictions.bias', 'cls.seq_relationship.bias', 'cls.seq_relationship.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight']
- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
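
Note: the warning above is the expected case here. Loading a plain BertModel backbone from the bert-base-uncased pre-training checkpoint simply discards the cls.* pre-training heads. A minimal sketch of the load the script presumably performs:

    from transformers import BertModel

    # Loading the backbone drops the masked-LM and next-sentence heads
    # (cls.*), which is the "expected" case described in the warning above.
    backbone = BertModel.from_pretrained("bert-base-uncased")
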
/home/ubuntu/anaconda3/lib/python3.8/site-packages/transformers/tokenization_utils_base.py:1656: FutureWarning: Calling BertTokenizer.from_pretrained() with the path to a single file or url is deprecated and won't be possible anymore in v5. Use a model identifier or the path to a directory instead.
  warnings.warn(
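
Note: this FutureWarning is triggered by passing a single vocab file or URL to BertTokenizer.from_pretrained. The non-deprecated form takes a model identifier or a directory; a sketch, assuming the same bert-base-uncased checkpoint the script already uses:

    from transformers import BertTokenizer

    # Pass a model identifier (or a directory path) rather than a single
    # file/URL; the single-file form is deprecated and removed in v5.
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
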
Number of samples used for training: 80
Number of samples used for validation: 10
Number of samples used for test: 10
/home/ubuntu/anaconda3/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning
  warnings.warn(
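
Note: as the warning suggests, the deprecated transformers AdamW can be swapped for the PyTorch implementation. A sketch only; the learning rate is an assumed value, not taken from this run:

    import torch

    # Drop-in replacement for transformers.optimization.AdamW.
    # `model` is the classifier being trained in news_classifier.py;
    # lr=2e-5 is illustrative.
    optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
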
Epoch 1/1
6it [00:15, 2.58s/it]
Train loss 2.7903212507565818 accuracy 0.19791666666666666
Val loss 5.48581059773763 accuracy 0.0625
TRAINING COMPLETED!!!
0.041666666666666664
SAVING MODEL
Traceback (most recent call last):
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/torch/utils/data/datapipes/datapipe.py", line 317, in __getstate__
    value = pickle.dumps(self._datapipe)
AttributeError: Can't pickle local object 'to_map_style_dataset.<locals>._MapStyleDataset'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "news_classifier.py", line 396, in <module>
    mlflow.pytorch.save_model(
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/mlflow/pytorch/__init__.py", line 512, in save_model
    torch.save(pytorch_model, model_path, pickle_module=pickle_module, **kwargs)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/torch/serialization.py", line 379, in save
    _save(obj, opened_zipfile, pickle_module, pickle_protocol)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/torch/serialization.py", line 589, in _save
    pickler.dump(obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/cloudpickle/cloudpickle_fast.py", line 602, in dump
    return Pickler.dump(self, obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/torch/utils/data/datapipes/datapipe.py", line 162, in __reduce_ex__
    return super().__reduce_ex__(*args, **kwargs)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/torch/utils/data/datapipes/datapipe.py", line 320, in __getstate__
    value = dill.dumps(self._datapipe)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/dill/_dill.py", line 304, in dumps
    dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/dill/_dill.py", line 276, in dump
    Pickler(file, protocol, **_kwds).dump(obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/dill/_dill.py", line 498, in dump
    StockPickler.dump(self, obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 485, in dump
    self.save(obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 601, in save
    self.save_reduce(obj=obj, *rv)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 685, in save_reduce
    save(cls)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 558, in save
    f(self, obj) # Call unbound method with explicit self
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/dill/_dill.py", line 1422, in save_type
    pickler.save_reduce(_create_type, (type(obj), obj.__name__,
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 690, in save_reduce
    save(args)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 558, in save
    f(self, obj) # Call unbound method with explicit self
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 899, in save_tuple
    save(element)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 558, in save
    f(self, obj) # Call unbound method with explicit self
  File "/home/ubuntu/anaconda3/lib/python3.8/site-packages/dill/_dill.py", line 990, in save_module_dict
    StockPickler.save_dict(pickler, obj)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 969, in save_dict
    self._batch_setitems(obj.items())
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 995, in _batch_setitems
    save(v)
  File "/home/ubuntu/anaconda3/lib/python3.8/pickle.py", line 576, in save
    rv = reduce(self.proto)
TypeError: cannot pickle '_abc_data' object
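
Note on the failure: mlflow.pytorch.save_model serializes the entire model object via torch.save, and that object still references the dataset produced by torchtext's to_map_style_dataset, whose locally defined _MapStyleDataset class cannot be pickled. A possible workaround, as a sketch only (the train_dataset/val_dataset attribute names are assumptions about news_classifier.py, not taken from this log):

    import mlflow.pytorch
    import torch

    # `model` is the trained classifier from news_classifier.py.

    # Option 1 (sketch): persist only the weights; a state_dict holds
    # tensors and never drags dataset references into the pickle.
    torch.save(model.state_dict(), "news_classifier_state_dict.pt")

    # Option 2 (sketch): drop the datapipe-backed dataset references
    # before letting MLflow pickle the full object. Attribute names
    # below are assumed, not confirmed by the script.
    model.train_dataset = None
    model.val_dataset = None
    mlflow.pytorch.save_model(model, "model")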