from padl import params, save
from my_codebase.models import build_classifier
import torch

# build the RNN from a named group of parameters
rnn = torch.nn.GRU(
    **params(
        'rnn',
        input_size=64,
        hidden_size=512,
    )
)
import padl
import torch

# declare named hyperparameters with default values
hidden_size = padl.param('hidden_size', 512)
input_size = padl.param('input_size', 64)
n_tokens = padl.param('n_tokens', 16)

# wrap torch.nn so its layers can be used as PADL transforms
nn = padl.transform(torch.nn)
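A minimal sketch of how these parameters and the wrapped torch.nn could be assembled into a pipeline and saved; the layer choices are placeholders rather than the original model, and the exact path handling of padl.save (whether it appends the .padl extension) is an assumption:

import padl
import torch

nn = padl.transform(torch.nn)

hidden_size = padl.param('hidden_size', 512)
n_tokens = padl.param('n_tokens', 16)

# placeholder pipeline: batch the input, embed the tokens, project back to the vocabulary
my_classifier = (
    padl.batch
    >> nn.Embedding(n_tokens, hidden_size)
    >> nn.Linear(hidden_size, n_tokens)
    >> padl.unbatch
)

# assumption: save creates the my_classifier.padl directory shown below
padl.save(my_classifier, 'my_classifier')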
my_classifier.padl
|__0.pt
|__1.pt
|__2.pt
|__3.pt
|__requirements.txt
|__transform.py
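Loading the saved pipeline back is the mirror image (a sketch; the input tensor is a made-up placeholder):

import padl
import torch

my_classifier = padl.load('my_classifier.padl')

# single-item inference; use eval_apply / train_apply for batched data
prediction = my_classifier.infer_apply(torch.tensor([1, 2, 3]))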
from padl import run

# execute config.py and retrieve the object `m` it defines
m = run('config.py').m
print(m)
from padl import run
import torch
import padl

# replace torch.nn with a wrapped version so newly created layers are PADL transforms
torch.nn = padl.transform(torch.nn)

# load the pipeline `lm` from the config and splice a ReLU between its stages
lm = run('config.py').lm
lm = lm[:2] >> torch.nn.ReLU() >> lm[2:]
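The spliced result is still an ordinary PADL transform, so it can be saved like any other (the target path here is made up):

import padl

padl.save(lm, 'lm_with_relu')  # hypothetical path; lm is the pipeline patched above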
from padl import run
import torch

# override the `rnn` defined in config.py with a different module
run('config.py', rnn=torch.nn.LSTM(64, 1024, 2, dropout=0.3))
from padl import run

# run config.py with the default parameter values declared there
run('config.py')
from padl import run

# override individual parameter groups by name
run('config.py', rnn_params={'input_size': 32}, classifier_params={'n_embed': 32})
from padl.transforms import Batchify
from padl.transforms import identity
from padl.transforms import Unbatchify
from padl import batch
import padl
from padl import params
from padl import unbatch
import re
import torch