def myfunc(a: int = 1, b: str = "", c: list = []):
    # Careful: a mutable default such as [] is shared across calls;
    # prefer c=None and build the list inside the function if it will be mutated.
    pass


def myfunc(a, b, c):
    pass
config = {
    "a": 1,
    "b": 2,
    "c": 5
}
myfunc(**config)  # Equivalent to myfunc(a=1, b=2, c=5)
def myfunc(a, b, c, d, e, f):
    # Do stuff
    pass


# Arguments can be passed by keyword in any order.
myfunc(a=1, b="foo", c=42, d=100, e=0, f="bar")
class EmptyClass:
    pass


def some_function():
    pass
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
else:
    # This block runs only if the loop finished without hitting "break".
    pass
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
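# A small, self-contained example of for/else (hypothetical values): the "else"
# branch reports that the search finished without finding a match.
items = [1, 3, 5, 7]
target = 4
for item in items:
    if item == target:
        print("Found", item)
        break
else:
    print("Not found")  # Runs only because no "break" was hit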
for i in range(n):
    try:
        data = next(dataiter)
    except ValueError:
        continue
    # The code here is skipped if ValueError was raised in the "try" block.
    # Otherwise, run the rest of the loop here ...
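# A small, self-contained example of the same pattern (hypothetical data):
# malformed entries are skipped with "continue" instead of aborting the loop.
raw_values = ["1", "2", "oops", "4"]
parsed = []
for raw in raw_values:
    try:
        value = int(raw)
    except ValueError:
        continue  # Skip this entry and move on to the next one
    parsed.append(value)
print(parsed)  # [1, 2, 4]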
epoch_history["loss"].append(sum(batch_history["loss"]) / len(batch_history["loss"]))
epoch_history["accuracy"].append(sum(batch_history["accuracy"]) / len(batch_history["accuracy"]))
model.eval()
print("Validation...")
val_loss, val_accuracy = validate(model, valid_loader)
epoch_history["val_loss"].append(val_loss)
epoch_history["val_accuracy"].append(val_accuracy)
print(f"{epoch_history=}")
from tqdm import tqdm, trange

for epoch in trange(epochs, unit="epoch", desc="Train"):
    model.train()
    with tqdm(train_loader, desc="Train") as tbatch:
        for batch_idx, (samples, targets) in enumerate(tbatch):
            samples = samples.to(device).long()
            targets = targets.to(device)
            model.zero_grad()
            predictions, _ = model(samples.transpose(0, 1))
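            # The snippet stops right after the forward pass. A minimal sketch of how
            # the rest of the batch step could continue (the loss/target dtype handling,
            # the accuracy formula, and the tqdm postfix keys below are assumptions,
            # not part of the original code):
            loss = criterion(predictions, targets.float())
            loss.backward()
            optimizer.step()
            batch_history["loss"].append(loss.item())
            batch_history["accuracy"].append((predictions.round() == targets).float().mean().item())
            tbatch.set_postfix(loss=batch_history["loss"][-1], accuracy=batch_history["accuracy"][-1])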
import torch
import torch.nn as nn

model = BiLSTM(input_dim, embedding_dim, hidden_dim)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
criterion = nn.BCELoss().to(device)
optimizer = torch.optim.Adam(model.parameters())
model.to(device)

batch_history = {
    "loss": [],
    "accuracy": []
}
epoch_history = {