loss_blog_2
import tensorflow as tf

class model:
    def __init__(self):
        xavier = tf.keras.initializers.GlorotUniform()
        self.l1 = tf.keras.layers.Dense(64, kernel_initializer=xavier, activation=tf.nn.relu, input_shape=[1])
        self.l2 = tf.keras.layers.Dense(64, kernel_initializer=xavier, activation=tf.nn.relu)
        self.out = tf.keras.layers.Dense(1, kernel_initializer=xavier)
        self.train_op = tf.keras.optimizers.Adagrad(learning_rate=0.1)

    # Running the model (forward pass)
    def run(self, X):
        boom = self.l1(X)
        boom1 = self.l2(boom)
        boom2 = self.out(boom1)
        return boom2

    # Custom loss function: squared error between prediction and target
    def get_loss(self, X, Y):
        boom = self.l1(X)
        boom1 = self.l2(boom)
        boom2 = self.out(boom1)
        return tf.math.square(boom2 - Y)

    # Get gradients of the loss w.r.t. every layer's weights and biases
    def get_grad(self, X, Y):
        with tf.GradientTape() as tape:
            tape.watch(self.l1.variables)
            tape.watch(self.l2.variables)
            tape.watch(self.out.variables)
            L = self.get_loss(X, Y)
        g = tape.gradient(L, [self.l1.variables[0], self.l1.variables[1],
                              self.l2.variables[0], self.l2.variables[1],
                              self.out.variables[0], self.out.variables[1]])
        return g

    # Perform one gradient-descent step
    def network_learn(self, X, Y):
        g = self.get_grad(X, Y)
        self.train_op.apply_gradients(zip(g, [self.l1.variables[0], self.l1.variables[1],
                                              self.l2.variables[0], self.l2.variables[1],
                                              self.out.variables[0], self.out.variables[1]]))
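For reference, the class above can be exercised with a minimal training loop like the sketch below. The synthetic dataset (fitting y = x^2), the array shapes, and the epoch count are illustrative assumptions, not part of the original gist.

import numpy as np

# Assumed usage sketch: train the network on a small synthetic regression task.
# Inputs and targets are (N, 1) float32 arrays so they match the Dense layers'
# (batch, 1) outputs.
x = np.linspace(-1.0, 1.0, 100, dtype=np.float32).reshape(-1, 1)
y = (x ** 2).astype(np.float32)

net = model()
for epoch in range(100):  # epoch count chosen arbitrarily for illustration
    net.network_learn(x, y)

preds = net.run(x)  # forward pass on the trained network
print(float(tf.reduce_mean(tf.math.square(preds - y))))  # final mean squared error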