I hereby claim:
- I am MichalMalyska on github.
- I am michalmalyska (https://keybase.io/michalmalyska) on keybase.
- I have a public key whose fingerprint is 5260 6F1A B94A 5821 2AB9 B502 DAFB 65D5 DA70 D72A
To claim this, I am signing this object:
local experiment_name = "cnn_edss19"; | |
{ "train_options": { | |
"serialization_dir": "/results/dev/" + experiment_name, | |
"file_friendly_logging": false, | |
"recover": false, | |
"force": true, | |
"node_rank": 0, | |
"batch_weight_key": "", | |
"dry_run": false, |
import os | |
import pandas as pd | |
from typing import Dict, List, Iterator, Tuple, Union | |
import logging | |
import torch | |
from overrides import overrides | |
from transformers import BertTokenizerFast |
"dataset_reader": { | |
"type": "data_scripts.dataset_reader.ms_edss19_reader", | |
"token_indexers": { | |
"tokens": { | |
"type": "pretrained_transformer", | |
"model_name": "ms-bert", | |
"namespace": "tokens", | |
"max_length": 768, | |
} | |
}, |
# Load the NLP4H/ms_bert encoder and its matching tokenizer from the
# Hugging Face Hub (downloads weights on first call, then uses the cache).
# Bug fix: the original did `import transformers` but then referenced
# AutoTokenizer / AutoModel unqualified, which raises NameError.
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("NLP4H/ms_bert")
model = AutoModel.from_pretrained("NLP4H/ms_bert")
# Load NLP4H/ms_bert with its language-modeling head, plus the tokenizer.
# Bug fix: the original did `import transformers` but then referenced
# AutoTokenizer / AutoModelWithLMHead unqualified, which raises NameError.
# NOTE(review): AutoModelWithLMHead is deprecated in recent transformers
# releases in favor of AutoModelForMaskedLM (the class it resolves to for
# BERT checkpoints) — kept here to preserve the snippet's exact behavior;
# confirm the installed transformers version still exports it.
from transformers import AutoModelWithLMHead, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("NLP4H/ms_bert")
model = AutoModelWithLMHead.from_pretrained("NLP4H/ms_bert")
"seq2vec_encoder": { | |
"type": "cnn", | |
"embedding_dim": hidden_size, | |
"num_filters": 128, | |
"ngram_filter_sizes": [2, 3, 4, 5, 6, 10], |
I hereby claim:
To claim this, I am signing this object: