import padl
import torch

# named pipeline parameters with defaults; these can be overridden at load time
hidden_size = padl.param('hidden_size', 512)
input_size = padl.param('input_size', 64)
n_tokens = padl.param('n_tokens', 16)

# wrap torch.nn so its layers can be used as padl transforms
nn = padl.transform(torch.nn)
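How these parameters are consumed is not shown in the gist; the following is a minimal sketch, assuming a simple feed-forward token classifier (the layer choice is an assumption, not from the original), of how they might parametrize a pipeline that is then saved, producing the directory listed below:

# sketch: params parametrize transform-wrapped torch layers, composed with >>
classifier = (
    nn.Embedding(n_tokens, input_size)      # token ids -> embedding vectors
    >> nn.Linear(input_size, hidden_size)
    >> nn.ReLU()
    >> nn.Linear(hidden_size, n_tokens)
)
padl.save(classifier, 'my_classifier')      # serializes the pipeline to my_classifier.padl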
my_classifier.padl
|__0.pt
|__1.pt
|__2.pt
|__3.pt
|__requirements.txt
|__transform.py
import padl
import torch
from padl import params
from my_codebase.pipelines import build_string_processor, build_preprocessor, build_model
from my_codebase.models import build_classifier
from my_codebase.transforms import Dictionary, build_dictionary

nn = padl.transform(torch.nn)

# reload the saved pipeline, overriding the stored defaults for
# hidden_size and input_size by keyword
model = padl.load('my_classifier.padl', hidden_size=1024, input_size=24)
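Once reloaded, the pipeline can be applied like any padl transform; a brief sketch (the token-id input is purely illustrative):

# infer_apply runs a single sample through the pipeline; padl adds and
# removes the batch dimension automatically
prediction = model.infer_apply(torch.tensor([1, 2, 3]))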
import torch

class HiddenState(torch.nn.Module):
    """Wrap an RNN layer so only its output tensor is returned,
    discarding the hidden-state tuple."""

    def __init__(self, layer):
        super().__init__()
        self.layer = layer

    def forward(self, x):
        return self.layer(x)[0]
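A usage sketch for the wrapper (the layer sizes here are assumptions): torch.nn.LSTM returns a tuple of (output, (h_n, c_n)), and HiddenState keeps only the output so the layer can sit inside a sequential pipeline.

rnn = HiddenState(torch.nn.LSTM(input_size=64, hidden_size=512, batch_first=True))
x = torch.randn(8, 10, 64)   # dummy batch: (batch, seq_len, input_size)
out = rnn(x)                 # shape (8, 10, 512); the state tuple is discarded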
# gin-config version of the same hyperparameters: bindings are applied to
# function/constructor arguments by name
import models

models.build_classifier.rnn_layer = @LSTM()
models.build_classifier.n_tokens = 16

LSTM.hidden_size = 512
LSTM.input_size = 64
LSTM.num_layers = 1
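For comparison, consuming the gin file above takes roughly the following wiring. This is a sketch assuming the standard gin-config API, that the config is saved as config.gin, and that build_classifier lives in models.py to match the config's import line:

import gin
import torch

# register torch.nn.LSTM so the config can refer to it as @LSTM()
gin.external_configurable(torch.nn.LSTM, 'LSTM')

# in models.py, matching the `import models` line of the config
@gin.configurable
def build_classifier(rnn_layer, n_tokens):
    # rnn_layer is the LSTM instance built from the config bindings;
    # HiddenState is the wrapper class defined earlier in this gist
    return torch.nn.Sequential(
        torch.nn.Embedding(n_tokens, rnn_layer.input_size),
        HiddenState(rnn_layer),
    )

gin.parse_config_file('config.gin')
model = build_classifier()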
# created with python-3.9.10
padl==0.2.5
torch==1.10.2