ROC curve for neural network
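
# Trains a small fully-connected binary classifier on synthetic Gaussian
# data, then plots the ROC curve of the trained model on the training set.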
import torch
import torch.nn as nn
from torch.utils.data import TensorDataset, DataLoader
from torch.optim import Adam
from sklearn.metrics import roc_curve
import matplotlib.pyplot as plt


def create_data():
    """
    Create separable synthetic data.
    """
    feature_dim = 5

    # Class 0: standard normal features shifted by a random positive offset
    labels_1 = torch.full((1000, 1), 0.)
    data_1 = torch.randn((1000, feature_dim)) + 0.7 * torch.rand((feature_dim,))

    # Class 1: standard normal features shifted by a random negative offset
    labels_2 = torch.full((1000, 1), 1.)
    data_2 = torch.randn((1000, feature_dim)) - 0.7 * torch.rand((feature_dim,))

    data = torch.cat((data_1, data_2), dim=0)
    labels = torch.cat((labels_1, labels_2), dim=0)
    return data, labels


if __name__ == "__main__":
    # Specifying model, optimizer and loss function
    model = nn.Sequential(nn.Linear(5, 20),
                          nn.ReLU(),
                          nn.Linear(20, 1),
                          nn.Sigmoid())
    opt = Adam(model.parameters(), 1e-3)
    bce = nn.BCELoss()

    # Create data and wrap it with a dataloader
    data, labels = create_data()
    ds = TensorDataset(data, labels)
    dl = DataLoader(ds, batch_size=64, shuffle=True)

    # Training
    for epoch in range(20):
        for x, y in dl:
            output = model(x)
            loss = bce(output, y)
            opt.zero_grad()
            loss.backward()
            opt.step()
        print(f"EPOCH : {epoch}")
    # Compute the ROC curve on the training dataset
    with torch.no_grad():
        fpr, tpr, _ = roc_curve(labels.squeeze(-1).numpy(),
                                model(data).squeeze(-1).numpy())

    # Plotting
    plt.plot(fpr, tpr, marker='.')
    plt.ylabel('True Positive Rate')
    plt.xlabel('False Positive Rate')
    plt.show()
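
    # Possible extension (not in the original gist): summarize the ROC curve
    # with a single AUC number. A minimal sketch assuming the same `labels`,
    # `data` and `model` defined above; roc_auc_score is sklearn's standard
    # helper and takes (y_true, y_score).
    from sklearn.metrics import roc_auc_score
    with torch.no_grad():
        auc = roc_auc_score(labels.squeeze(-1).numpy(),
                            model(data).squeeze(-1).numpy())
    print(f"AUC : {auc:.4f}")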