@bartolsthoorn
Created April 29, 2017 12:13
Simple multi-label classification example with PyTorch and MultiLabelSoftMarginLoss (https://en.wikipedia.org/wiki/Multi-label_classification)
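For reference, MultiLabelSoftMarginLoss treats each of the C outputs as an independent binary classification and averages the per-label binary cross-entropy over the labels (this is the formula from the PyTorch docs, with sigma the sigmoid):

    loss(x, y) = -1/C * sum_i [ y_i * log(sigma(x_i)) + (1 - y_i) * log(1 - sigma(x_i)) ]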
import torch
import torch.nn as nn
import numpy as np
import torch.optim as optim
from torch.autograd import Variable

# Synthetic data: each input has two components, each either 0 or a
# positive value, and maps to a set of target labels.
# (1, 0) => target labels 0 and 2
# (0, 1) => target label 1
# (0, 0) => target label 2
# Draws of (1, 1) fall through all three branches and are discarded.
train = []
labels = []
for i in range(10000):
    category = (np.random.choice([0, 1]), np.random.choice([0, 1]))
    if category == (1, 0):
        train.append([np.random.uniform(0.1, 1), 0])
        labels.append([1, 0, 1])
    if category == (0, 1):
        train.append([0, np.random.uniform(0.1, 1)])
        labels.append([0, 1, 0])
    if category == (0, 0):
        train.append([np.random.uniform(0.1, 1), np.random.uniform(0.1, 1)])
        labels.append([0, 0, 1])

class _classifier(nn.Module):
    def __init__(self, nlabel):
        super(_classifier, self).__init__()
        self.main = nn.Sequential(
            nn.Linear(2, 64),
            nn.ReLU(),
            nn.Linear(64, nlabel),
        )

    def forward(self, input):
        return self.main(input)

nlabel = len(labels[0])  # => 3
classifier = _classifier(nlabel)

optimizer = optim.Adam(classifier.parameters())
criterion = nn.MultiLabelSoftMarginLoss()

epochs = 5
for epoch in range(epochs):
    losses = []
    # One sample at a time (batch size 1); the loss takes raw logits.
    for i, sample in enumerate(train):
        inputv = Variable(torch.FloatTensor(sample)).view(1, -1)
        labelsv = Variable(torch.FloatTensor(labels[i])).view(1, -1)

        output = classifier(inputv)
        loss = criterion(output, labelsv)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        losses.append(loss.data.mean())
    print('[%d/%d] Loss: %.3f' % (epoch + 1, epochs, np.mean(losses)))
$ python multilabel.py
[1/5] Loss: 0.092
[2/5] Loss: 0.005
[3/5] Loss: 0.001
[4/5] Loss: 0.000
[5/5] Loss: 0.000
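
The script above only trains; to actually read off a prediction, one common approach (not part of the original gist) is to take the sigmoid of the logits and threshold at 0.5. A minimal sketch, assuming the trained classifier from above; the sample input and the threshold are illustrative choices:

import torch

# Sketch: predict the label set for a fresh sample shaped like
# category (1, 0). The 0.5 threshold is a conventional default.
classifier.eval()
with torch.no_grad():
    x = torch.FloatTensor([0.7, 0.0]).view(1, -1)
    probs = torch.sigmoid(classifier(x))            # per-label probabilities
    predicted = [i for i, p in enumerate(probs[0]) if p > 0.5]
print(predicted)  # a well-trained model should print [0, 2]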
@wj-Mcat commented Apr 7, 2020

I created a custom multi-label loss function, but it trains too slowly.

class MultilabelCrossEntropyLoss(nn.Module):
    def __init__(self):
        super(MultilabelCrossEntropyLoss, self).__init__()

    def forward(self, source: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        # squash the logits into per-label probabilities
        source = source.sigmoid()
        # per-label binary cross-entropy, summed over all labels
        score = -1. * target * source.log() - (1 - target) * torch.log(1 - source)
        return score.sum()

I got this result:

[1/500] Loss: 1.067
[2/500] Loss: 0.815
[3/500] Loss: 0.722
[4/500] Loss: 0.664
[5/500] Loss: 0.622
[6/500] Loss: 0.591
[7/500] Loss: 0.566
[8/500] Loss: 0.546
[9/500] Loss: 0.529
[10/500] Loss: 0.515
[11/500] Loss: 0.503
[12/500] Loss: 0.492
[13/500] Loss: 0.483
[14/500] Loss: 0.475
[15/500] Loss: 0.468
[16/500] Loss: 0.461
[17/500] Loss: 0.456
[18/500] Loss: 0.450

Why?
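
For comparison, nn.MultiLabelSoftMarginLoss differs from the snippet above in two ways: it averages the per-label terms over the C labels instead of summing them, and it evaluates the log-sigmoid terms directly from the logits, so it never takes log() of a saturated sigmoid (which can underflow to -inf and stall training). A sketch of a closer equivalent, with a class name of my own choosing:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class StableMultilabelLoss(nn.Module):
        # log(sigmoid(x)) and log(1 - sigmoid(x)) = log(sigmoid(-x)) are
        # computed with F.logsigmoid, which is numerically stable, and the
        # result is averaged over all labels, matching the built-in loss
        # with its default 'mean' reduction.
        def forward(self, logits: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
            loss = -(target * F.logsigmoid(logits)
                     + (1 - target) * F.logsigmoid(-logits))
            return loss.mean()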
