Abien Fred Agarap (AFAgarap)
AFAgarap / sampling.py
Created May 16, 2019 09:32
TensorFlow 2.0 implementation of a sampling layer for a variational autoencoder.
class Sampling(tf.keras.layers.Layer):
    def call(self, args):
        z_mean, z_log_var = args
        batch = tf.shape(z_mean)[0]
        dim = tf.shape(z_mean)[1]
        # Reparameterization trick: z = mu + sigma * epsilon, with epsilon ~ N(0, I).
        epsilon = tf.random.normal(shape=(batch, dim), mean=0., stddev=1.)
        return z_mean + epsilon * tf.exp(0.5 * z_log_var)
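The layer is called on the pair of mean and log-variance tensors produced by the encoder. A quick illustrative call, where the batch and latent sizes are assumptions rather than values from the gist:

# Illustrative usage; the shapes (128, 10) are assumptions, not from the gist.
z_mean = tf.zeros((128, 10))
z_log_var = tf.zeros((128, 10))
z = Sampling()((z_mean, z_log_var))  # z ~ N(z_mean, exp(z_log_var)), drawn via the reparameterization trick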
AFAgarap / decoder.py
Created May 16, 2019 09:31
TensorFlow 2.0 implementation of a decoder layer for a variational autoencoder.
class Decoder(tf.keras.layers.Layer):
    def __init__(self, original_dim):
        super(Decoder, self).__init__()
        self.hidden_layer_1 = tf.keras.layers.Dense(units=32, activation=tf.nn.relu)
        self.hidden_layer_2 = tf.keras.layers.Dense(units=64, activation=tf.nn.relu)
        self.hidden_layer_3 = tf.keras.layers.Dense(units=128, activation=tf.nn.relu)
        self.output_layer = tf.keras.layers.Dense(units=original_dim, activation=tf.nn.sigmoid)

    def call(self, input_features):
        activation_1 = self.hidden_layer_1(input_features)
        # The gist preview cuts off here; the rest of the forward pass is
        # reconstructed from the layers defined in __init__.
        activation_2 = self.hidden_layer_2(activation_1)
        activation_3 = self.hidden_layer_3(activation_2)
        return self.output_layer(activation_3)
AFAgarap / encoder.py
Created May 16, 2019 09:31
TensorFlow 2.0 implementation of an encoder layer for a variational autoencoder.
class Encoder(tf.keras.layers.Layer):
    def __init__(self, latent_dim):
        super(Encoder, self).__init__()
        self.input_layer = tf.keras.layers.InputLayer(input_shape=(28, 28, 1))
        self.reshape = tf.keras.layers.Reshape(target_shape=(784,))
        self.hidden_layer_1 = tf.keras.layers.Dense(units=128, activation=tf.nn.relu)
        self.hidden_layer_2 = tf.keras.layers.Dense(units=64, activation=tf.nn.relu)
        self.hidden_layer_3 = tf.keras.layers.Dense(units=32, activation=tf.nn.relu)
        self.z_mean_layer = tf.keras.layers.Dense(units=latent_dim)
        self.z_log_var_layer = tf.keras.layers.Dense(units=latent_dim)

    def call(self, input_features):
        # The preview stops after __init__; this forward pass is reconstructed
        # from the layers defined above.
        features = self.reshape(input_features)
        activation = self.hidden_layer_3(self.hidden_layer_2(self.hidden_layer_1(features)))
        z_mean = self.z_mean_layer(activation)
        z_log_var = self.z_log_var_layer(activation)
        return z_mean, z_log_var
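The three VAE gists define the sampling layer, decoder, and encoder, but not the model that composes them. Below is a minimal sketch of how they could fit together, assuming the reconstructed call methods above; the VAE class, the default latent size of 10, and the KL-divergence bookkeeping are illustrative additions, not code from the gists.

class VAE(tf.keras.Model):
    # Hypothetical composition of the Encoder, Sampling, and Decoder layers above.
    def __init__(self, latent_dim=10, original_dim=784):
        super(VAE, self).__init__()
        self.encoder = Encoder(latent_dim=latent_dim)
        self.sampling = Sampling()
        self.decoder = Decoder(original_dim=original_dim)

    def call(self, input_features):
        z_mean, z_log_var = self.encoder(input_features)
        z = self.sampling((z_mean, z_log_var))
        reconstructed = self.decoder(z)
        # Standard VAE regularizer: KL divergence between q(z|x) and N(0, I).
        kl_loss = -0.5 * tf.reduce_mean(
            z_log_var - tf.square(z_mean) - tf.exp(z_log_var) + 1)
        self.add_loss(kl_loss)
        return reconstructed

During training, the KL term registered through add_loss would be combined with a reconstruction error to form the usual evidence lower bound objective.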
AFAgarap / computation.py
Last active October 21, 2019 11:05
Training procedure for a vanilla autoencoder model.
autoencoder = Autoencoder(intermediate_dim=64, original_dim=784)
opt = tf.optimizers.Adam(learning_rate=learning_rate)

(training_features, _), (test_features, _) = tf.keras.datasets.mnist.load_data()
training_features = training_features / np.max(training_features)
training_features = training_features.reshape(training_features.shape[0],
                                               training_features.shape[1] * training_features.shape[2])
training_features = training_features.astype('float32')
training_dataset = tf.data.Dataset.from_tensor_slices(training_features)
training_dataset = training_dataset.batch(batch_size)
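The preview ends before the loop that actually runs the optimization. A minimal sketch of how training could proceed, assuming the train() and loss() functions from the gists below and the epochs constant defined in autoencoder-full.py:

# Hypothetical training loop (not shown in the preview): one optimization
# step per batch, repeated for the configured number of epochs.
for epoch in range(epochs):
    for batch_features in training_dataset:
        train(loss, autoencoder, opt, batch_features)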
AFAgarap / train.py
Created March 16, 2019 07:13
Optimization function for a vanilla autoencoder.
def train(loss, model, opt, original):
    with tf.GradientTape() as tape:
        # The loss is evaluated inside the tape context so its computation is recorded.
        gradients = tape.gradient(loss(model, original), model.trainable_variables)
        gradient_variables = zip(gradients, model.trainable_variables)
        opt.apply_gradients(gradient_variables)
AFAgarap / loss.py
Created March 16, 2019 07:11
Reconstruction error function for a vanilla autoencoder.
def loss(model, original):
    reconstruction_error = tf.reduce_mean(tf.square(tf.subtract(model(original), original)))
    return reconstruction_error
AFAgarap / autoencoder.py
Created March 16, 2019 07:09
TensorFlow 2.0 implementation of a vanilla autoencoder model.
class Autoencoder(tf.keras.Model):
    def __init__(self, intermediate_dim, original_dim):
        super(Autoencoder, self).__init__()
        self.encoder = Encoder(intermediate_dim=intermediate_dim)
        self.decoder = Decoder(intermediate_dim=intermediate_dim, original_dim=original_dim)

    def call(self, input_features):
        code = self.encoder(input_features)
        reconstructed = self.decoder(code)
        return reconstructed
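After training, the model can be applied to held-out images to inspect reconstructions. A brief sketch reusing the test_features split loaded in computation.py above; the preprocessing mirrors the training pipeline, and the variable names are illustrative rather than taken from the gists:

# Illustrative reconstruction on the MNIST test split (not part of the gists).
test_features = test_features.reshape(test_features.shape[0], 784).astype('float32') / 255.
reconstructions = autoencoder(tf.constant(test_features))  # shape: (10000, 784)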
AFAgarap / decoder.py
Last active November 16, 2019 18:49
TensorFlow 2.0 implementation of a decoder for a vanilla autoencoder.
class Decoder(tf.keras.layers.Layer):
    def __init__(self, intermediate_dim, original_dim):
        super(Decoder, self).__init__()
        self.hidden_layer = tf.keras.layers.Dense(
            units=intermediate_dim,
            activation=tf.nn.relu,
            kernel_initializer='he_uniform'
        )
        self.output_layer = tf.keras.layers.Dense(
            units=original_dim,
            # The preview cuts off here; the sigmoid activation and the forward
            # pass below are a reconstruction, not text from the gist.
            activation=tf.nn.sigmoid
        )

    def call(self, code):
        activation = self.hidden_layer(code)
        return self.output_layer(activation)
AFAgarap / encoder.py
Last active November 16, 2019 18:49
TensorFlow 2.0 implementation of an encoder layer for a vanilla autoencoder.
class Encoder(tf.keras.layers.Layer):
    def __init__(self, intermediate_dim):
        super(Encoder, self).__init__()
        self.hidden_layer = tf.keras.layers.Dense(
            units=intermediate_dim,
            activation=tf.nn.relu,
            kernel_initializer='he_uniform'
        )
        self.output_layer = tf.keras.layers.Dense(
            units=intermediate_dim,
            # The preview cuts off here; the sigmoid activation and the forward
            # pass below are a reconstruction, not text from the gist.
            activation=tf.nn.sigmoid
        )

    def call(self, input_features):
        activation = self.hidden_layer(input_features)
        return self.output_layer(activation)
AFAgarap / autoencoder-full.py
Last active March 27, 2022 14:56
TensorFlow 2.0 implementation for a vanilla autoencoder. Link to tutorial: https://medium.com/@abien.agarap/implementing-an-autoencoder-in-tensorflow-2-0-5e86126e9f7
"""TensorFlow 2.0 implementation of vanilla Autoencoder."""
import numpy as np
import tensorflow as tf
__author__ = "Abien Fred Agarap"
np.random.seed(1)
tf.random.set_seed(1)
batch_size = 128
epochs = 10