@jamescalam
Created June 11, 2021 19:38
{
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  },
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "ml",
   "display_name": "ML",
   "language": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "Epoch 0: 100%|██████████| 12500/12500 [1:29:47<00:00, 2.32it/s, loss=0.358]\n",
      "Epoch 1: 100%|██████████| 12500/12500 [1:22:20<00:00, 2.53it/s, loss=0.31]\n"
     ]
    }
   ],
"source": [
"epochs = 2\n",
"\n",
"for epoch in range(epochs):\n",
" # setup loop with TQDM and dataloader\n",
" loop = tqdm(loader, leave=True)\n",
" for batch in loop:\n",
" # initialize calculated gradients (from prev step)\n",
" optim.zero_grad()\n",
" # pull all tensor batches required for training\n",
" input_ids = batch['input_ids'].to(device)\n",
" attention_mask = batch['attention_mask'].to(device)\n",
" labels = batch['labels'].to(device)\n",
" # process\n",
" outputs = model(input_ids, attention_mask=attention_mask,\n",
" labels=labels)\n",
" # extract loss\n",
" loss = outputs.loss\n",
" # calculate loss for every parameter that needs grad update\n",
" loss.backward()\n",
" # update parameters\n",
" optim.step()\n",
" # print relevant info to progress bar\n",
" loop.set_description(f'Epoch {epoch}')\n",
" loop.set_postfix(loss=loss.item())"
]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "model.save_pretrained('./filiberto') # and don't forget to save filiBERTo!"
   ]
  }
 ]
}
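
The training cell above references tqdm, loader, model, optim, and device, all of which would have been defined in earlier cells of the notebook that this gist does not include. Below is a minimal sketch of one plausible setup, assuming the usual Hugging Face masked-language-modeling workflow; the RobertaForMaskedLM model, the dataset wrapper, the dummy encodings, and the hyperparameters are illustrative assumptions, not taken from the gist.

import torch
from torch.utils.data import DataLoader, Dataset
from torch.optim import AdamW
from tqdm import tqdm
from transformers import RobertaConfig, RobertaForMaskedLM

class MLMDataset(Dataset):
    """Wraps pre-tokenized encodings: input_ids, attention_mask, labels."""
    def __init__(self, encodings):
        self.encodings = encodings
    def __len__(self):
        return self.encodings['input_ids'].shape[0]
    def __getitem__(self, i):
        return {key: tensor[i] for key, tensor in self.encodings.items()}

# dummy tensors so the sketch runs end to end; in the real notebook these would
# come from tokenizing the training corpus and masking a portion of the inputs
encodings = {
    'input_ids': torch.randint(5, 30_000, (64, 128)),
    'attention_mask': torch.ones(64, 128, dtype=torch.long),
    'labels': torch.randint(5, 30_000, (64, 128)),
}
loader = DataLoader(MLMDataset(encodings), batch_size=16, shuffle=True)

config = RobertaConfig(vocab_size=30_522)  # assumed vocabulary size
model = RobertaForMaskedLM(config)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)
model.train()

optim = AdamW(model.parameters(), lr=1e-4)  # assumed learning rate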
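
save_pretrained writes the model weights and config to the ./filiberto directory, which can later be reloaded for inference. A small usage sketch follows, assuming the tokenizer was also saved to the same directory in an earlier cell not shown in this gist; the fill-mask pipeline call and the masked sentence are illustrative only.

from transformers import pipeline

# assumes ./filiberto also contains the tokenizer files saved earlier
fill = pipeline('fill-mask', model='./filiberto', tokenizer='./filiberto')
print(fill('buongiorno, come <mask>?'))  # illustrative query; <mask> is RoBERTa's mask token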