# Snippet collection (saved from a shared gist):
# assorted PyTorch/Keras training-loop fragments and Python idiom examples.
# Training loop: iterate over batches with a tqdm progress bar.
model.train()  # enable training-mode behavior (dropout, batch-norm updates)
for epoch in range(1, 5):
    with tqdm(train_loader, unit="batch") as tepoch:
        for data, target in tepoch:
            tepoch.set_description(f"Epoch {epoch}")
            # Move the batch to the same device as the model.
            data, target = data.to(device), target.to(device)
            optimizer.zero_grad()  # reset gradients left over from the previous step
            output = model(data)
            # Predicted class = index of the max logit; squeeze() drops the
            # singleton axis kept by keepdim=True.
            predictions = output.argmax(dim=1, keepdim=True).squeeze()
            # NOTE(review): no loss/backward/optimizer.step() is visible here —
            # the snippet looks truncated; confirm against the original source.
def myfunc(a: int = 1, b: str = "", c: "list | None" = None):
    """Placeholder demonstrating safe default arguments.

    A literal ``[]`` default is created once at definition time and shared
    across every call, so mutations would leak between calls. Using ``None``
    as a sentinel and building the list inside the body avoids that bug.
    """
    if c is None:
        c = []
def myfunc(a, b, c):
    """Accept three positional arguments and do nothing."""

config = {
    "a": 1,
    "b": 2,
    "c": 5,
}
# Dictionary unpacking: each key becomes the keyword argument of the same name.
myfunc(**config)
def myfunc(a, b, c, d, e=None, f=None):
    """Placeholder demonstrating keyword-argument calls.

    ``e`` and ``f`` default to ``None`` so callers may omit them: the call
    below leaves out ``e``, which raised ``TypeError: missing a required
    argument`` when all six parameters were mandatory.
    """
    # Do stuff

myfunc(a=1, b="foo", c=42, d=100, f="bar")
class EmptyClass:
    """An intentionally empty class; the docstring stands in for ``pass``."""
def some_function():
    """Do nothing and return None; the docstring stands in for ``pass``."""
# for/else: the else suite runs only when the loop finishes without "break".
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
else:
    # Executed only if "break" was never reached.
    # A comment alone is not a statement, so "pass" is required to make
    # the else suite syntactically valid.
    pass
# Plain loop with an early exit: "break" leaves the loop immediately.
for i in range(n):
    # Do stuff...
    if condition_is_met:
        break
# Skip an iteration when fetching the next item fails.
for i in range(n):
    try:
        data = next(dataiter)
    except ValueError:
        # Jump straight to the next iteration; everything after the
        # try/except in this loop body is skipped for this item.
        continue
    # Reached only when no ValueError was raised:
    # run the rest of the loop here ...
from tensorflow.keras.layers import Bidirectional, LSTM, Dense, Embedding
from tensorflow.keras.models import Sequential

# Two stacked bidirectional LSTMs over an embedding, ending in one sigmoid unit
# (binary classification head).
layers = [
    Embedding(input_dim, embedding_dim),
    # return_sequences=True so the second LSTM receives the full sequence.
    Bidirectional(LSTM(hidden_dim, return_sequences=True)),
    Bidirectional(LSTM(hidden_dim)),
    Dense(1, activation="sigmoid"),
]
model = Sequential(layers)
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=["accuracy"])
model.summary()
# Record this epoch's mean training loss/accuracy from the per-batch history.
batch_losses = batch_history["loss"]
batch_accuracies = batch_history["accuracy"]
epoch_history["loss"].append(sum(batch_losses) / len(batch_losses))
epoch_history["accuracy"].append(sum(batch_accuracies) / len(batch_accuracies))

# Switch to evaluation-mode behavior before running validation.
model.eval()
print("Validation...")
val_loss, val_accuracy = validate(model, valid_loader)
epoch_history["val_loss"].append(val_loss)
epoch_history["val_accuracy"].append(val_accuracy)
print(f"{epoch_history=}")