@c3333
Forked from SuperShinyEyes/f1_score.py
Created November 14, 2020 17:07
F1 score in PyTorch
import torch


def f1_loss(y_true: torch.Tensor, y_pred: torch.Tensor, is_training=False) -> torch.Tensor:
    '''Calculate F1 score. Can work with GPU tensors.

    The original implementation is written by Michal Haltuf on Kaggle.

    Returns
    -------
    torch.Tensor
        A 0-dim (scalar) tensor with 0 <= val <= 1.

    References
    ----------
    - https://www.kaggle.com/rejpalcz/best-loss-function-for-f1-score-metric
    - https://scikit-learn.org/stable/modules/generated/sklearn.metrics.f1_score.html#sklearn.metrics.f1_score
    - https://discuss.pytorch.org/t/calculating-precision-recall-and-f1-score-in-case-of-multi-label-classification/28265/6
    '''
    assert y_true.ndim == 1
    assert y_pred.ndim == 1 or y_pred.ndim == 2

    # If y_pred holds per-class scores, reduce it to hard class predictions.
    if y_pred.ndim == 2:
        y_pred = y_pred.argmax(dim=1)

    # Confusion-matrix counts for the positive class (labels assumed to be 0/1).
    tp = (y_true * y_pred).sum().to(torch.float32)
    tn = ((1 - y_true) * (1 - y_pred)).sum().to(torch.float32)
    fp = ((1 - y_true) * y_pred).sum().to(torch.float32)
    fn = (y_true * (1 - y_pred)).sum().to(torch.float32)

    # Epsilon avoids division by zero when a class never occurs.
    epsilon = 1e-7

    precision = tp / (tp + fp + epsilon)
    recall = tp / (tp + fn + epsilon)

    f1 = 2 * (precision * recall) / (precision + recall + epsilon)
    # Note: the hard counts above are not differentiable, so this flag alone
    # does not make the score usable as a training loss.
    f1.requires_grad = is_training
    return f1
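A minimal usage sketch (not part of the original gist): the toy tensors below are made up for illustration, and scikit-learn's f1_score is called only as a cross-check on the same hard predictions.

import torch
from sklearn.metrics import f1_score

# Made-up example: 1-D binary ground truth and 2-D per-class scores.
y_true = torch.tensor([1, 0, 1, 1, 0, 1])
scores = torch.tensor([[0.2, 0.8],
                       [0.9, 0.1],
                       [0.4, 0.6],
                       [0.7, 0.3],
                       [0.8, 0.2],
                       [0.1, 0.9]])

f1 = f1_loss(y_true, scores)      # argmax over dim=1 happens inside f1_loss
print(float(f1))                  # ~0.857 for this toy example

# Cross-check against scikit-learn on the hard predictions.
print(f1_score(y_true.numpy(), scores.argmax(dim=1).numpy()))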
c3333 commented Nov 14, 2020

f1_score.py
def f1_loss(y_true: torch.Tensor, y_pred: torch.Tensor, is_training=False) -> torch.Tensor:
    '''Calculate F1 score. Can work with GPU tensors.

    The original implementation is written by Michal Haltuf on Kaggle.

    Returns
    -------
    torch.Tensor
        A 0-dim (scalar) tensor with 0 <= val <= 1.

    References
    ----------
    - https://www.kaggle.com/rejpalcz/best-loss-function-for-f1-score-metric
    - https://scikit-learn.org/stable/modules/generated/sklearn.metrics.f1_score.html#sklearn.metrics.f1_score
    - https://discuss.pytorch.org/t/calculating-precision-recall-and-f1-score-in-case-of-multi-label-classification/28265/6
    '''
    assert y_true.ndim == 1
    assert y_pred.ndim == 1 or y_pred.ndim == 2

    if y_pred.ndim == 2:
        y_pred = y_pred.argmax(dim=1)

    tp = (y_true * y_pred).sum().to(torch.float32)
    tn = ((1 - y_true) * (1 - y_pred)).sum().to(torch.float32)
    fp = ((1 - y_true) * y_pred).sum().to(torch.float32)
    fn = (y_true * (1 - y_pred)).sum().to(torch.float32)

    epsilon = 1e-7

    precision = tp / (tp + fp + epsilon)
    recall = tp / (tp + fn + epsilon)

    f1 = 2 * (precision * recall) / (precision + recall + epsilon)
    f1.requires_grad = is_training
    return f1
