Created
October 26, 2021 07:11
-
-
Save fa-ahmad/0a4d49c12496246a9e9056930a301cd9 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class MyModel(Model):
    """Small convnet for 10-class image classification.

    Architecture: Conv2D(32, 3x3, relu) -> Flatten -> Dense(128, relu)
    -> Dense(10). The final layer has no activation, so the model emits
    raw logits (paired with a from_logits=True loss by the caller).
    """

    def __init__(self):
        super(MyModel, self).__init__()
        # 32 filters, 3x3 kernel; input spatial shape is inferred on first call.
        self.conv1 = Conv2D(32, 3, activation='relu')
        self.flatten = Flatten()
        self.d1 = Dense(128, activation='relu')
        # Raw logits — no softmax here.
        self.d2 = Dense(10)

    def call(self, x, training=None):
        """Forward pass; returns a (batch, 10) tensor of logits.

        `training` is accepted so Keras can forward the flag passed as
        model(images, training=True) in the training step. No layer here
        branches on it yet, but accepting it keeps the signature correct
        if Dropout/BatchNorm layers are added later.
        """
        x = self.conv1(x)
        x = self.flatten(x)
        x = self.d1(x)
        return self.d2(x)
# Create an instance of the model.
model = MyModel()

# The model outputs raw logits (its final Dense has no activation), so the
# loss must apply softmax internally via from_logits=True. Labels are
# integer class indices, hence the Sparse variant.
loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
optimizer = tf.keras.optimizers.Adam()

# Running metrics accumulated across steps; the training loop is expected
# to call .reset_state() on them between epochs.
train_loss = tf.keras.metrics.Mean(name='train_loss')
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
@tf.function
def train_step(images, labels):
    """Run one gradient-descent step and update the running metrics.

    Args:
        images: batch of input images fed to the model.
        labels: integer class labels for the batch (sparse format).

    Side effects: updates `model`'s weights via `optimizer`, and
    accumulates into the module-level `train_loss` / `train_accuracy`
    metrics.
    """
    with tf.GradientTape() as tape:
        # training=True is only needed if there are layers with different
        # behavior during training versus inference (e.g. Dropout).
        predictions = model(images, training=True)
        loss = loss_object(labels, predictions)
    # Gradients are computed after exiting the tape context; only the
    # forward pass needs to be recorded.
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))

    train_loss(loss)
    train_accuracy(labels, predictions)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment