import tensorflow as tf

def load_vgg(sess, vgg_path):
    # Load the pretrained model and weights from the SavedModel directory
    tf.saved_model.loader.load(sess, ['vgg16'], vgg_path)
    # Get the tensors to be returned from the graph
    graph = tf.get_default_graph()
    image_input = graph.get_tensor_by_name('image_input:0')
    keep_prob = graph.get_tensor_by_name('keep_prob:0')
    layer3 = graph.get_tensor_by_name('layer3_out:0')
    layer4 = graph.get_tensor_by_name('layer4_out:0')
    layer7 = graph.get_tensor_by_name('layer7_out:0')
    return image_input, keep_prob, layer3, layer4, layer7
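A minimal usage sketch, assuming TensorFlow 1.x and a VGG16 SavedModel exported under ./data/vgg (the path is illustrative):

import tensorflow as tf

with tf.Session() as sess:
    image_input, keep_prob, layer3, layer4, layer7 = load_vgg(sess, './data/vgg')
    print(image_input.get_shape(), layer7.get_shape())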
def get_summary_n(squad_list, squad_name, nationality_list):
    summary = []
    for i in nationality_list:
        count = 0
        for j in squad_list:
            # Best squad rated by overall score for this nationality/formation
            O_temp_rating, _ = get_best_squad_n(formation=j, nationality=i, measurement='Overall')
            # Best squad rated by potential score
            P_temp_rating, _ = get_best_squad_n(formation=j, nationality=i, measurement='Potential')
            summary.append([i, squad_name[count], O_temp_rating, P_temp_rating])
            count += 1
    return summary
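A usage sketch, assuming get_best_squad_n is defined elsewhere in the notebook; the formations and nationalities below are illustrative:

squad_list = ['4-3-3', '4-4-2']
squad_name = ['Squad 4-3-3', 'Squad 4-4-2']
nationality_list = ['England', 'Germany', 'Spain']

for row in get_summary_n(squad_list, squad_name, nationality_list):
    print(row)  # [nationality, squad label, overall rating, potential rating]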
# Shared gate and activation instances used by every RNNLayer
mulGate = MultiplyGate()
addGate = AddGate()
activation = Tanh()

class RNNLayer:
    def forward(self, x, prev_s, U, W, V):
        self.mulu = mulGate.forward(U, x)        # U·x_t
        self.mulw = mulGate.forward(W, prev_s)   # W·s_{t-1}
        self.add = addGate.forward(self.mulw, self.mulu)
        self.s = activation.forward(self.add)    # s_t = tanh(U·x_t + W·s_{t-1})
        self.mulv = mulGate.forward(V, self.s)   # unnormalized output scores
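MultiplyGate and AddGate are referenced above but not shown in this snippet; a minimal sketch consistent with how they are used here (forward signatures matching mulGate.forward(U, x) and addGate.forward(a, b)) would be:

import numpy as np

class MultiplyGate:
    def forward(self, W, x):
        return np.dot(W, x)
    def backward(self, W, x, dz):
        # Gradients of z = W·x with respect to W and x
        dW = np.outer(dz, x)
        dx = np.dot(np.transpose(W), dz)
        return dW, dx

class AddGate:
    def forward(self, x1, x2):
        return x1 + x2
    def backward(self, x1, x2, dz):
        # Addition routes the incoming gradient to both operands unchanged
        return dz * np.ones_like(x1), dz * np.ones_like(x2)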
import numpy as np

class Model:
    def __init__(self, word_dim, hidden_dim=100, bptt_truncate=4):
        self.word_dim = word_dim            # vocabulary size
        self.hidden_dim = hidden_dim        # hidden-state size
        self.bptt_truncate = bptt_truncate  # max steps to backpropagate through time
        # Initialize each weight matrix uniformly in [-1/sqrt(fan_in), 1/sqrt(fan_in)]
        self.U = np.random.uniform(-np.sqrt(1. / word_dim), np.sqrt(1. / word_dim), (hidden_dim, word_dim))
        self.W = np.random.uniform(-np.sqrt(1. / hidden_dim), np.sqrt(1. / hidden_dim), (hidden_dim, hidden_dim))
        self.V = np.random.uniform(-np.sqrt(1. / hidden_dim), np.sqrt(1. / hidden_dim), (word_dim, hidden_dim))
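A brief instantiation sketch; the vocabulary size below is illustrative:

np.random.seed(10)
model = Model(word_dim=8000, hidden_dim=100)
print(model.U.shape, model.W.shape, model.V.shape)  # (100, 8000) (100, 100) (8000, 100)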
def forward_propagation(self, x):
    # The total number of time steps
    T = len(x)
    layers = []
    prev_s = np.zeros(self.hidden_dim)
    # For each time step...
    for t in range(T):
        layer = RNNLayer()
        # One-hot encode the word index for this step
        input = np.zeros(self.word_dim)
        input[x[t]] = 1
        layer.forward(input, prev_s, self.U, self.W, self.V)
        prev_s = layer.s
        layers.append(layer)
    return layers
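A toy forward pass, assuming the Model, RNNLayer, and gate classes above; the vocabulary size and word indices are illustrative:

np.random.seed(0)
model = Model(word_dim=5, hidden_dim=4)
layers = model.forward_propagation([0, 3, 1])
print(len(layers))            # 3, one RNNLayer per time step
print(layers[-1].mulv.shape)  # (5,), unnormalized scores over the vocabulary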
class Softmax:
    def predict(self, x):
        # Shift by the max for numerical stability; softmax is shift-invariant
        exp_scores = np.exp(x - np.max(x))
        return exp_scores / np.sum(exp_scores)
    def loss(self, x, y):
        # Cross-entropy loss for the true class index y
        probs = self.predict(x)
        return -np.log(probs[y])
    def diff(self, x, y):
        # Gradient of the loss w.r.t. the scores: probs - one_hot(y)
        probs = self.predict(x)
        probs[y] -= 1.0
        return probs
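A quick sanity check with illustrative scores:

sm = Softmax()
x = np.array([2.0, 1.0, 0.1])
print(sm.predict(x).sum())  # 1.0: probabilities normalize
print(sm.loss(x, 0))        # -log p[0]
print(sm.diff(x, 0))        # predict(x) with 1 subtracted at index 0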
def calculate_loss(self, x, y):
    assert len(x) == len(y)
    output = Softmax()
    layers = self.forward_propagation(x)
    loss = 0.0
    for i, layer in enumerate(layers):
        # Cross-entropy between this step's scores and the target word
        loss += output.loss(layer.mulv, y[i])
    return loss / float(len(y))

def calculate_total_loss(self, X, Y):
    # Average the per-sequence loss over the whole dataset
    loss = 0.0
    for i in range(len(Y)):
        loss += self.calculate_loss(X[i], Y[i])
    return loss / float(len(Y))
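With freshly initialized weights the model predicts roughly uniformly, so the per-step loss should sit near ln(word_dim); a sanity-check sketch with illustrative sizes:

np.random.seed(0)
model = Model(word_dim=5, hidden_dim=4)
print(model.calculate_loss([0, 3, 1], [3, 1, 2]))  # close to np.log(5) ~ 1.609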
def bptt(self, x, y):
    assert len(x) == len(y)
    output = Softmax()
    layers = self.forward_propagation(x)
    dU = np.zeros(self.U.shape)
    dV = np.zeros(self.V.shape)
    dW = np.zeros(self.W.shape)
    T = len(layers)
    prev_s_t = np.zeros(self.hidden_dim)  # hidden state before the first step
    for t in range(T - 1, -1, -1):
        dmulv = output.diff(layers[t].mulv, y[t])  # dL/d(scores) at step t
        dV += np.outer(dmulv, layers[t].s)
        delta = np.dot(self.V.T, dmulv) * (1.0 - layers[t].s ** 2)  # back through tanh
        for step in range(t, max(-1, t - self.bptt_truncate - 1), -1):  # truncated window
            dU[:, x[step]] += delta  # one-hot input: only column x[step] receives gradient
            s_prev = prev_s_t if step == 0 else layers[step - 1].s
            dW += np.outer(delta, s_prev)
            delta = np.dot(self.W.T, delta) * (1.0 - s_prev ** 2)
    return dU, dW, dV
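A numerical gradient check is the standard way to validate bptt; a sketch on a tiny model follows. Note that bptt accumulates the gradient of the summed per-step loss while calculate_loss averages over steps, so the analytic gradient is divided by the sequence length before comparison. All sizes and indices here are illustrative:

np.random.seed(0)
model = Model(word_dim=4, hidden_dim=3)
x_seq, y_seq = [0, 2, 1], [2, 1, 3]
dU, dW, dV = model.bptt(x_seq, y_seq)
eps = 1e-5
i, j = 0, 1                      # probe a single entry of W
saved = model.W[i, j]
model.W[i, j] = saved + eps
loss_plus = model.calculate_loss(x_seq, y_seq)
model.W[i, j] = saved - eps
loss_minus = model.calculate_loss(x_seq, y_seq)
model.W[i, j] = saved
numeric = (loss_plus - loss_minus) / (2 * eps)
print(np.isclose(dW[i, j] / len(y_seq), numeric, atol=1e-4))  # True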
def sgd(self, x, y, learning_rate):
    # One SGD step on a single (input, target) sequence pair
    dU, dW, dV = self.bptt(x, y)
    self.U -= learning_rate * dU
    self.V -= learning_rate * dV
    self.W -= learning_rate * dW
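A minimal training-loop sketch over toy data; the sequences, learning rate, and epoch count are all illustrative:

np.random.seed(0)
model = Model(word_dim=5, hidden_dim=4)
X = [[0, 3, 1], [2, 4]]
Y = [[3, 1, 2], [4, 0]]  # next-word targets for each input sequence
for epoch in range(100):
    for x_seq, y_seq in zip(X, Y):
        model.sgd(x_seq, y_seq, learning_rate=0.05)
print(model.calculate_total_loss(X, Y))  # expect a value well below the ~1.61 random baseline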
class Tanh:
    def forward(self, x):
        return np.tanh(x)
    def backward(self, x, top_diff):
        # d tanh(x)/dx = 1 - tanh(x)^2, chained with the incoming gradient
        output = self.forward(x)
        return (1.0 - np.square(output)) * top_diff
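A quick numerical check of the tanh derivative:

t = Tanh()
x = np.array([0.5, -1.0, 2.0])
eps = 1e-6
numeric = (t.forward(x + eps) - t.forward(x - eps)) / (2 * eps)
print(np.allclose(t.backward(x, np.ones_like(x)), numeric))  # True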