
@MLWhiz
Created January 7, 2019 07:16
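The function below runs stratified K-fold training for a binary classifier in PyTorch, collecting out-of-fold predictions on the training set and fold-averaged predictions on the test set. It leans on a few helpers defined elsewhere in the original kernel (seed_everything, MyDataset, sigmoid, and a custom CyclicLR scheduler exposing batch_step()), plus the globals n_splits, SEED, n_epochs, batch_size, test_loader, and test_features. Minimal sketches of the simpler helpers, assuming they match the usual Kaggle-kernel versions:

import os
import random

import numpy as np
import torch

def seed_everything(seed=1234):
    # make runs reproducible across python, numpy and torch
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.backends.cudnn.deterministic = True

def sigmoid(x):
    # numpy sigmoid; the model outputs raw logits (BCEWithLogitsLoss)
    return 1 / (1 + np.exp(-x))

class MyDataset(torch.utils.data.Dataset):
    # wraps a dataset so each batch also carries its sample indices,
    # which the loop uses to scatter predictions back in order
    def __init__(self, dataset):
        self.dataset = dataset
    def __getitem__(self, index):
        data, target = self.dataset[index]
        return data, target, index
    def __len__(self):
        return len(self.dataset)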
import copy
import time

import numpy as np
import torch
import torch.nn as nn
from sklearn.model_selection import StratifiedKFold

# CyclicLR (a custom scheduler with a per-batch batch_step() method) and the
# globals n_splits, SEED, n_epochs, batch_size, test_loader and test_features
# are assumed to be defined elsewhere in the kernel.

def pytorch_model_run_cv(x_train, y_train, features, x_test, model_obj, feats=False, clip=True):
    seed_everything()
    avg_losses_f = []
    avg_val_losses_f = []
    # out-of-fold predictions on the training set
    train_preds = np.zeros(len(x_train))
    # fold-averaged predictions on the test set
    test_preds = np.zeros(len(x_test))

    splits = list(StratifiedKFold(n_splits=n_splits, shuffle=True,
                                  random_state=SEED).split(x_train, y_train))
    for fold, (train_idx, valid_idx) in enumerate(splits):
        # reseed per fold so each fold is individually reproducible
        seed_everything(fold * 1000 + fold)
        x_train = np.array(x_train)
        y_train = np.array(y_train)
        if feats:
            features = np.array(features)

        x_train_fold = torch.tensor(x_train[train_idx.astype(int)], dtype=torch.long).cuda()
        y_train_fold = torch.tensor(y_train[train_idx.astype(int), np.newaxis], dtype=torch.float32).cuda()
        if feats:
            kfold_X_features = features[train_idx.astype(int)]
            kfold_X_valid_features = features[valid_idx.astype(int)]
        x_val_fold = torch.tensor(x_train[valid_idx.astype(int)], dtype=torch.long).cuda()
        y_val_fold = torch.tensor(y_train[valid_idx.astype(int), np.newaxis], dtype=torch.float32).cuda()

        # train a fresh copy of the model on every fold
        model = copy.deepcopy(model_obj)
        model.cuda()

        # sum reduction: avg_loss below divides by the number of batches
        loss_fn = torch.nn.BCEWithLogitsLoss(reduction='sum')

        step_size = 300
        base_lr, max_lr = 0.001, 0.003
        optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()),
                                     lr=max_lr)
        # cyclic learning rate between base_lr and max_lr, decayed across cycles
        scheduler = CyclicLR(optimizer, base_lr=base_lr, max_lr=max_lr,
                             step_size=step_size, mode='exp_range',
                             gamma=0.99994)

        train = MyDataset(torch.utils.data.TensorDataset(x_train_fold, y_train_fold))
        valid = MyDataset(torch.utils.data.TensorDataset(x_val_fold, y_val_fold))
        train_loader = torch.utils.data.DataLoader(train, batch_size=batch_size, shuffle=True)
        valid_loader = torch.utils.data.DataLoader(valid, batch_size=batch_size, shuffle=False)

        print(f'Fold {fold + 1}')

        for epoch in range(n_epochs):
            start_time = time.time()
            model.train()
            avg_loss = 0.
            for x_batch, y_batch, index in train_loader:
                if feats:
                    f = kfold_X_features[index]
                    y_pred = model([x_batch, f])
                else:
                    y_pred = model(x_batch)

                if scheduler:
                    scheduler.batch_step()

                # compute loss, backpropagate, and update the weights
                loss = loss_fn(y_pred, y_batch)
                optimizer.zero_grad()
                loss.backward()
                if clip:
                    # clip gradients to stabilise training
                    nn.utils.clip_grad_norm_(model.parameters(), 1)
                optimizer.step()
                avg_loss += loss.item() / len(train_loader)

            model.eval()
            valid_preds_fold = np.zeros(x_val_fold.size(0))
            test_preds_fold = np.zeros(len(x_test))
            avg_val_loss = 0.
            for x_batch, y_batch, index in valid_loader:
                if feats:
                    f = kfold_X_valid_features[index]
                    y_pred = model([x_batch, f]).detach()
                else:
                    y_pred = model(x_batch).detach()
                avg_val_loss += loss_fn(y_pred, y_batch).item() / len(valid_loader)
                valid_preds_fold[index] = sigmoid(y_pred.cpu().numpy())[:, 0]

            elapsed_time = time.time() - start_time
            print('Epoch {}/{} \t loss={:.4f} \t val_loss={:.4f} \t time={:.2f}s'.format(
                epoch + 1, n_epochs, avg_loss, avg_val_loss, elapsed_time))

        avg_losses_f.append(avg_loss)
        avg_val_losses_f.append(avg_val_loss)

        # predict the test set batch by batch with this fold's model
        for j, (x_batch,) in enumerate(test_loader):
            if feats:
                f = test_features[j * batch_size:(j + 1) * batch_size]
                y_pred = model([x_batch, f]).detach()
            else:
                y_pred = model(x_batch).detach()
            test_preds_fold[j * batch_size:(j + 1) * batch_size] = sigmoid(y_pred.cpu().numpy())[:, 0]

        train_preds[valid_idx] = valid_preds_fold
        test_preds += test_preds_fold / len(splits)

    print('All \t loss={:.4f} \t val_loss={:.4f}'.format(np.average(avg_losses_f), np.average(avg_val_losses_f)))
    return train_preds, test_preds
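A hedged usage sketch: x_train_pad, x_test_pad, extra_feats_train, extra_feats_test, and MyModel are hypothetical stand-ins for the padded token-id matrices, optional dense feature arrays, and model class used in the original kernel, and the global values shown are placeholders. Note that test_loader and test_features must be built from the same test data passed as x_test:

# hypothetical globals expected by the function
n_splits, SEED, n_epochs, batch_size = 5, 1029, 6, 512

test_set = torch.utils.data.TensorDataset(
    torch.tensor(x_test_pad, dtype=torch.long).cuda())
test_loader = torch.utils.data.DataLoader(test_set, batch_size=batch_size, shuffle=False)
test_features = np.array(extra_feats_test)

train_oof, test_pred = pytorch_model_run_cv(x_train_pad, y_train, extra_feats_train,
                                            x_test_pad, MyModel(), feats=True)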