def batch_normalization(x, mean, var, beta, gamma, axis=-1, epsilon=1e-3):
    """Applies batch normalization on x given mean, var, beta and gamma.
    I.e. returns:
    `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`
    Args:
        x: Input tensor or variable.
        mean: Mean of batch.
        var: Variance of batch.
        beta: Tensor with which to center the input.
        gamma: Tensor by which to scale the input.
        axis: Integer, the axis that should be normalized (typically the features axis).
        epsilon: Fuzz factor.
    """
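As a sanity check on the formula in the docstring, here is a minimal NumPy sketch. The helper name `batch_norm_reference` is purely illustrative and is not the Keras backend implementation; it just applies the documented expression per feature:

import numpy as np

def batch_norm_reference(x, mean, var, beta, gamma, epsilon=1e-3):
    # Literal transcription of: (x - mean) / (sqrt(var) + epsilon) * gamma + beta
    return (x - mean) / (np.sqrt(var) + epsilon) * gamma + beta

x = np.random.randn(8, 4)                      # a batch of 8 samples with 4 features
mean, var = x.mean(axis=0), x.var(axis=0)      # per-feature batch statistics
gamma, beta = np.ones(4), np.zeros(4)          # identity scale and zero shift
out = batch_norm_reference(x, mean, var, beta, gamma)
print(out.mean(axis=0), out.std(axis=0))       # approximately zero mean, unit std per feature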
for i in range(5):
    plt.figure(figsize=(7, 7))
    for k in range(20):
        # Sample a batch of noise vectors and generate images from it
        noise = np.random.uniform(-1, 1, size=[100, noise_shape])
        im = generator.predict(noise)
        # Show the k-th generated image in a 5x4 grid, without axis ticks
        plt.subplot(5, 4, k + 1)
        plt.imshow(im[k].reshape(64, 64, 3))
        plt.xticks([])
        plt.yticks([])
    plt.show()
epochs = 300
batch_size = 64
loss_from_discriminator_model = []  # Collects the discriminator loss over training
loss_from_generator_model = []      # Collects the generator loss over training

with tf.device('/gpu:0'):
    for epoch in range(epochs):
        print(f"Currently training on Epoch {epoch+1}")
noise_shape = 100

# The generator upsamples our random seed using convolutional transpose (upsampling) layers
def generator_model():
    generator = Sequential()
    # Project the random noise vector into a 4x4x512 feature map
    generator.add(Dense(4*4*512, input_shape=[noise_shape]))
    # Next, add a reshape layer to turn the flat Dense output into a 4x4x512 tensor
    generator.add(Reshape([4, 4, 512]))
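    # --- The remainder of generator_model() is truncated in the original snippet. ---
    # A plausible DCGAN-style continuation (layer sizes are illustrative assumptions,
    # chosen only so the output matches the 64x64x3 images plotted earlier):
    generator.add(Conv2DTranspose(256, kernel_size=4, strides=2, padding='same', activation='relu'))  # 4x4 -> 8x8
    generator.add(BatchNormalization())
    generator.add(Conv2DTranspose(128, kernel_size=4, strides=2, padding='same', activation='relu'))  # 8x8 -> 16x16
    generator.add(BatchNormalization())
    generator.add(Conv2DTranspose(64, kernel_size=4, strides=2, padding='same', activation='relu'))   # 16x16 -> 32x32
    generator.add(BatchNormalization())
    # Final layer produces a 64x64x3 image; tanh pairs with noise drawn from [-1, 1]
    generator.add(Conv2DTranspose(3, kernel_size=4, strides=2, padding='same', activation='tanh'))    # 32x32 -> 64x64
    return generator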
import warnings
warnings.filterwarnings('ignore')
import glob
import numpy as np  # linear algebra
import pandas as pd
from PIL import Image
import matplotlib.pyplot as plt
import os
# Clone GFPGAN and enter the GFPGAN folder
%cd /content
!rm -rf GFPGAN
!git clone https://github.com/TencentARC/GFPGAN.git
%cd GFPGAN

# Set up the environment
# Install basicsr - https://github.com/xinntao/BasicSR
# We use BasicSR for both training and inference
!pip install basicsr
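After installing BasicSR, the GFPGAN demo typically continues by installing the remaining dependencies and fetching pretrained weights before running the bundled inference script. The version tag, download URL, and input/output paths below are assumptions based on the public GFPGAN repository, not copied from the original notebook; check the repo's README and releases page for the current values:

# Install facexlib (face detection/parsing helpers used by GFPGAN) and the repo's own requirements
!pip install facexlib
!pip install -r requirements.txt
!python setup.py develop

# Download a pretrained GFPGAN model (version and URL are assumptions; see the releases page)
!wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth -P experiments/pretrained_models

# Run inference on the sample images, writing restored results to results/
!python inference_gfpgan.py -i inputs/whole_imgs -o results -v 1.3 -s 2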
""" | |
A high-level strategy of coding feedforward propagation is as follows: | |
1. Perform a sum product at each neuron. | |
2. Compute activation. | |
3. Repeat the first two steps at each neuron until the output layer. | |
4. Compute the loss by comparing the prediction with the actual output. | |
""" | |
import numpy as np | |
from copy import deepcopy | |
import matplotlib.pyplot as plt |
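Following that four-step strategy, a single-hidden-layer forward pass can be written in a few lines of NumPy. The function name `feed_forward`, the sigmoid activation, and the mean-squared-error loss below are illustrative choices for the sketch, not necessarily the ones used later in the original notebook:

def feed_forward(inputs, outputs, weights):
    # weights: [W_hidden, b_hidden, W_out, b_out]
    # 1. Sum product at each hidden neuron
    pre_hidden = np.dot(inputs, weights[0]) + weights[1]
    # 2. Compute the activation (sigmoid here)
    hidden = 1 / (1 + np.exp(-pre_hidden))
    # 3. Repeat the sum product at the output layer
    pred_out = np.dot(hidden, weights[2]) + weights[3]
    # 4. Compute the loss against the actual output (mean squared error)
    mean_squared_error = np.mean(np.square(pred_out - outputs))
    return mean_squared_error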
import tensorflow as tf
from tensorflow import keras

def mp_model():
    model = keras.Sequential(
        [
            keras.layers.Flatten(input_shape=(32, 32, 3)),
            keras.layers.Dense(3000, activation="relu"),
            keras.layers.Dense(1000, activation="relu"),
            # Output layer: 10 classes assumed (the 32x32x3 inputs suggest CIFAR-10)
            keras.layers.Dense(10, activation="softmax"),
        ]
    )
    return model
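A quick way to exercise this model, assuming CIFAR-10 is the intended dataset (the optimizer, loss, and epoch count below are illustrative, not taken from the original notebook):

(X_train, y_train), (X_test, y_test) = keras.datasets.cifar10.load_data()
X_train, X_test = X_train / 255.0, X_test / 255.0   # scale pixels to [0, 1]

model = mp_model()
model.compile(optimizer="sgd",
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
model.fit(X_train, y_train, epochs=1, batch_size=64, validation_data=(X_test, y_test))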
history = model_keras_seq.fit_generator(generator=train_generator_for_seq_model,
                                        validation_data=validation_generator_for_seq_model,
                                        epochs=1, workers=-1)
predicted_test_seq_keras = model_keras_seq.predict_generator(test_generator_for_seq_model, verbose=1)

sample_submission['target'] = predicted_test_seq_keras[:len(sample_submission)]
sample_submission.to_csv('submission.csv', index=False)
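Note that `fit_generator` and `predict_generator` are deprecated in recent TensorFlow/Keras releases; on TF 2.1 or later the same generators can be passed straight to `fit` and `predict`:

history = model_keras_seq.fit(train_generator_for_seq_model,
                              validation_data=validation_generator_for_seq_model,
                              epochs=1)
predicted_test_seq_keras = model_keras_seq.predict(test_generator_for_seq_model, verbose=1)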
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv1D, BatchNormalization, Flatten, Dense
from tensorflow.keras.optimizers import Adam

model_keras_seq = Sequential()
model_keras_seq.add(Conv1D(64, input_shape=(3, 4096), kernel_size=3, activation='relu'))
model_keras_seq.add(BatchNormalization())
model_keras_seq.add(Flatten())
model_keras_seq.add(Dense(64, activation='relu'))
model_keras_seq.add(Dense(1, activation='sigmoid'))
model_keras_seq.compile(optimizer=Adam(lr=2e-4), loss='binary_crossentropy', metrics=['acc'])
model_keras_seq.summary()
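To sanity-check the expected input shape, you can push a random batch through the compiled model (dummy data only; the generators above supply the real signals):

import numpy as np

dummy_batch = np.random.rand(2, 3, 4096).astype('float32')  # (batch, timesteps, channels)
print(model_keras_seq.predict(dummy_batch).shape)            # -> (2, 1), one sigmoid score per sample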