Last active
February 2, 2018 16:11
-
-
Save pyaf/9c015fc03b80885a77d57fbe4c1a2a05 to your computer and use it in GitHub Desktop.
Loss function for MURA model
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# tai = total abnormal images, tni = total normal images
# study_data[x] is a study level dataframe
tai = {}
tni = {}
for cat in data_cat:
    tai[cat] = get_count(study_data[cat], 'positive')
    tni[cat] = get_count(study_data[cat], 'negative')
# Inverse-frequency class weights per phase: positives are weighted by the
# fraction of normal images and negatives by the fraction of abnormal ones,
# so the rarer class contributes more to the loss.
Wt1 = {cat: n_p(tni[cat] / (tni[cat] + tai[cat])) for cat in data_cat}
Wt0 = {cat: n_p(tai[cat] / (tni[cat] + tai[cat])) for cat in data_cat}
class Loss(torch.nn.modules.Module):
    """Class-weighted binary cross-entropy loss for the MURA model.

    Positive (abnormal) targets are weighted by ``Wt1[phase]`` and negative
    (normal) targets by ``Wt0[phase]``, so the rarer class contributes more
    to the loss.
    """

    def __init__(self, Wt1, Wt0):
        """
        Args:
            Wt1: dict mapping phase name (e.g. 'train'/'valid') to the
                weight applied to positive targets.
            Wt0: dict mapping phase name to the weight applied to
                negative targets.
        """
        super(Loss, self).__init__()
        self.Wt1 = Wt1
        self.Wt0 = Wt0

    def forward(self, inputs, targets, phase):
        """Compute the element-wise weighted BCE loss.

        Args:
            inputs: predicted probabilities — assumed already
                sigmoid-activated so values lie in [0, 1]; TODO confirm
                against the caller.
            targets: ground-truth labels (0 or 1), same shape as inputs.
            phase: key selecting the weights (e.g. 'train' or 'valid').

        Returns:
            Element-wise loss tensor (no reduction applied).
        """
        # Clamp probabilities away from exact 0/1: otherwise log() yields
        # -inf and the loss/gradients become NaN for saturated predictions.
        eps = 1e-7
        probs = inputs.clamp(eps, 1 - eps)
        loss = - (self.Wt1[phase] * targets * probs.log()
                  + self.Wt0[phase] * (1 - targets) * (1 - probs).log())
        return loss
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment