@cjmcmurtrie
Last active July 14, 2024 05:53
1D convolutional text classifier in PyTorch. The model embeds padded token ids, transposes to a channels-first layout, applies three Conv1d stages (the first two followed by average pooling), and finishes with a linear layer and a log-softmax over the classes.
import torch
import torch.nn as nn
class Transpose(nn.Module):
    """Swap the sequence and channel dims: (batch, seq, emb) -> (batch, emb, seq),
    the channels-first layout that Conv1d expects."""

    def forward(self, tensor):
        return tensor.transpose(1, 2)
class Squeeze(nn.Module):
    """Drop the trailing length-1 dim left by the final conv:
    (batch, channels, 1) -> (batch, channels). Squeezing dim 2 explicitly
    (rather than a bare .squeeze()) keeps a batch of size 1 from being
    collapsed as well."""

    def forward(self, tensor):
        return tensor.squeeze(2)
def get_cnn(vocab_size, n_classes, embedding_size, kernel=2, frac=2):
    # Assumes a padded sequence length of 100 tokens: the kernel size of 27
    # in the final conv reduces the sequence dim to exactly 1.
    embeddings = nn.Embedding(
        vocab_size + 5, embedding_size,  # +5 presumably reserves ids for special tokens
        padding_idx=0,
    )
    encoder = nn.Sequential(
        # (batch, emb, 100) -> (batch, emb // frac, 103)
        nn.Conv1d(embedding_size, embedding_size // frac, kernel, padding=2),
        nn.ReLU(),
        nn.AvgPool1d(kernel_size=2),  # -> (batch, emb // frac, 51)
        # -> (batch, emb // frac, 54)
        nn.Conv1d(embedding_size // frac, embedding_size // frac, kernel, padding=2),
        nn.ReLU(),
        nn.AvgPool1d(kernel_size=2),  # -> (batch, emb // frac, 27)
        # kernel spans all 27 remaining positions -> (batch, emb // frac, 1)
        nn.Conv1d(embedding_size // frac, embedding_size // frac, 27),
        nn.ReLU(),
        Squeeze(),  # -> (batch, emb // frac)
        nn.Linear(embedding_size // frac, n_classes),
        nn.LogSoftmax(dim=1),  # log-probabilities; pair with nn.NLLLoss
    )
    return nn.Sequential(
        embeddings,   # (batch, 100) token ids -> (batch, 100, emb)
        Transpose(),  # -> (batch, emb, 100), channels-first for Conv1d
        encoder,
    )
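
A minimal usage sketch (not part of the original gist). The hyperparameter values below are illustrative assumptions; inputs must be padded or truncated to exactly 100 tokens to match the arithmetic hard-coded into get_cnn, with id 0 reserved for padding. Because the model emits log-probabilities, it pairs with nn.NLLLoss.

import torch
import torch.nn as nn
import torch.optim as onn

# Illustrative sizes, not from the original gist.
model = get_cnn(vocab_size=10000, n_classes=4, embedding_size=128)

tokens = torch.randint(1, 10000, (8, 100))  # batch of 8 padded sequences
labels = torch.randint(0, 4, (8,))          # one class id per sequence

criterion = nn.NLLLoss()                    # expects log-probabilities
optimizer = onn.Adam(model.parameters(), lr=1e-3)

log_probs = model(tokens)                   # shape: (8, 4)
loss = criterion(log_probs, labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()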