
# Initialise the train loop metrics
train_acc = tf.keras.metrics.Mean()
train_loss = tf.keras.metrics.Mean()
val_acc = tf.keras.metrics.Mean()
val_loss = tf.keras.metrics.Mean()

# Loss and optimizer used by the custom training step
loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=True)
optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)

@tf.function
def train_step(x, y):
    # Forward pass under the tape, then backprop and apply the gradients
    with tf.GradientTape() as tape:
        logits = model(x, training=True)
        loss_value = loss_fn(y, logits)
    grads = tape.gradient(loss_value, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss_value, logits
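# Hedged sketch (not in the original snippet): one way the metrics and
# train_step above could be driven by an epoch loop. `train_gen` and
# `num_epochs` are assumed to be defined elsewhere in the gist.
for epoch in range(num_epochs):
    train_loss.reset_states()
    train_acc.reset_states()
    for x_batch, y_batch in train_gen:
        loss_value, logits = train_step(x_batch, y_batch)
        train_loss(loss_value)
        train_acc(tf.keras.metrics.binary_accuracy(
            tf.cast(y_batch, tf.float32), tf.sigmoid(logits)))
    print(f'Epoch {epoch}: loss={float(train_loss.result()):.4f} '
          f'acc={float(train_acc.result()):.4f}')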
# Setup callbacks
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir)
model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath=ckpt_dir,
    save_weights_only=True,
    monitor='val_acc',
    mode='max',
    save_best_only=True)

# Compile the model with the binary cross-entropy loss and Adam
loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=True)
optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
model.compile(loss=loss_fn,
              optimizer=optimizer,
              metrics=['acc'])
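# Hedged sketch (assumption, not from the gist): fitting the compiled model with
# the callbacks above. `train_gen`, `val_imgs` and `num_epochs` are assumed to be
# defined by the data-loading code elsewhere; `val_imgs` is assumed to be an
# (images, labels) tuple or dataset that Keras accepts as validation_data.
history = model.fit(train_gen,
                    validation_data=val_imgs,
                    epochs=num_epochs,
                    callbacks=[tensorboard_callback, model_checkpoint_callback])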
# Set up loss and optimizer
loss_fn = nn.BCEWithLogitsLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
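# Hedged sketch (not in the original snippet): a PyTorch training step using the
# loss and optimizer above, assuming `model` is the CNN defined below and each
# batch is a float image tensor with 0/1 labels.
def torch_train_step(x, y):
    model.train()
    optimizer.zero_grad()
    logits = model(x)                              # raw logits, shape (batch, 1)
    loss = loss_fn(logits, y.float().view(-1, 1))  # BCEWithLogitsLoss wants float targets
    loss.backward()
    optimizer.step()
    return loss.item()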
import torch
from torch import nn
import torch.nn.functional as F

# Set up model
class CNN(nn.Module):
    def __init__(self):
        super(CNN, self).__init__()
        self.conv1 = nn.Conv2d(3, 32, 3)
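# Hedged sketch (assumption): the class above is truncated in this excerpt; one
# plausible completion that roughly mirrors the Keras model below (two conv/pool
# blocks, then a single logit for BCEWithLogitsLoss). Layer sizes and `im_size`
# are illustrative, not the author's exact architecture.
class CNNSketch(nn.Module):
    def __init__(self, im_size):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 32, 3, padding=1)
        self.conv2 = nn.Conv2d(32, 64, 3, padding=1)
        self.pool = nn.MaxPool2d(2)
        self.fc = nn.Linear(64 * (im_size // 4) * (im_size // 4), 1)

    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = torch.flatten(x, 1)
        return self.fc(x)  # raw logit, paired with BCEWithLogitsLoss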
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential

# Set up model
model = Sequential([
    layers.Conv2D(32, 3, padding='same', activation='relu', input_shape=(im_size, im_size, 3)),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
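# Hedged sketch (assumption): the Sequential definition above is cut off in this
# excerpt; a typical way to finish it is another pooling step, a flatten, and a
# single-logit output to match BinaryCrossentropy(from_logits=True). The dense
# width is illustrative.
model_sketch = Sequential([
    layers.Conv2D(32, 3, padding='same', activation='relu', input_shape=(im_size, im_size, 3)),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    layers.Dense(1),  # raw logit
])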
import os

# Get image names
train_dir = os.path.join(root_path, 'data/train')
test_dir = os.path.join(root_path, 'data/test')
train_dogs = [f'{train_dir}/{i}' for i in os.listdir(train_dir) if 'dog' in i]  # get dog images
train_cats = [f'{train_dir}/{i}' for i in os.listdir(train_dir) if 'cat' in i]  # get cat images
test_imgs = [f'{test_dir}/{i}' for i in os.listdir(test_dir)]
if num_im:  # Combine dog and cat images, then shuffle them
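# Hedged sketch (assumption): the `if num_im:` branch is truncated in this
# excerpt; a typical implementation caps each class at num_im // 2 images,
# combines the lists and shuffles them, roughly like this.
import random
if num_im:
    train_imgs = train_dogs[:num_im // 2] + train_cats[:num_im // 2]
else:
    train_imgs = train_dogs + train_cats
random.shuffle(train_imgs)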
import random
import numpy as np
import cv2
import torch
import torchvision
# Custom augmentation functions
from image_augmentation import *

class PytorchDataGenerator(torch.utils.data.Dataset):
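# Hedged sketch (assumption): the Dataset body is not shown in this excerpt; a
# torch Dataset of this kind normally implements __init__, __len__ and
# __getitem__ along these lines. Argument names mirror the TensorFlow generator
# call below but are not from the gist.
class PytorchDataGeneratorSketch(torch.utils.data.Dataset):
    def __init__(self, img_paths, im_size):
        self.img_paths = img_paths
        self.im_size = im_size

    def __len__(self):
        return len(self.img_paths)

    def __getitem__(self, idx):
        path = self.img_paths[idx]
        img = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB)
        img = cv2.resize(img, (self.im_size, self.im_size))
        img = torch.from_numpy(img).permute(2, 0, 1).float() / 255.0  # CHW, [0, 1]
        label = 1.0 if 'dog' in path else 0.0  # dog = 1, cat = 0
        return img, torch.tensor(label)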
# Build the training data generator and grab the validation split
train_gen = TensorflowDataGenerator(train_dir, batch_size, num_im=num_im, shuffle=True)
val_imgs = train_gen.load_val()
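# Hedged sketch (assumption): the PyTorch counterpart of the generator call
# above, wrapping the Dataset in a DataLoader. The constructor arguments mirror
# the TensorflowDataGenerator call but are not taken from the gist.
train_ds = PytorchDataGenerator(train_dir, num_im=num_im)
train_loader = torch.utils.data.DataLoader(train_ds, batch_size=batch_size, shuffle=True)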