Sunny Guha (sol0invictus)
import numpy as np
import tensorflow as tf

def sigmoid(z):
    # Compute sigmoid(z) eagerly (TensorFlow 2.x style)
    x = np.asarray(z, dtype=np.float32)
    sigmoid = tf.math.sigmoid(x)
    result = sigmoid.numpy()
    return result
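A quick check of the eager version (the test values are arbitrary):

print(sigmoid(0.0))               # 0.5
print(sigmoid([-1.0, 0.0, 1.0]))  # [0.26894143 0.5 0.7310586 ]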
def sigmoid(z):
    # TF1-style graph version of the same function.
    # Create a placeholder for x. Name it 'x'.
    x = tf.placeholder(tf.float32, name="x")
    # Compute sigmoid(x)
    sigmoid = tf.sigmoid(x)
    # Create a session and run it, using a feed_dict to pass z's value to x.
    with tf.Session() as sess:
        # Run the session and call the output "result"
        result = sess.run(sigmoid, feed_dict={x: z})
    return result
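Called the same way, this graph version only runs under TensorFlow 1.x (or the tf.compat.v1 shim, an assumption about the setup):

print(sigmoid(0.0))   # 0.5
print(sigmoid(12.0))  # 0.9999938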
# Printing c in graph mode shows an unevaluated Tensor, not the product
a = tf.constant(2)
b = tf.constant(10)
c = tf.multiply(a, b)
print(c)  # Tensor("Mul:0", shape=(), dtype=int32)
# Evaluating c inside a session yields the actual value
a = tf.constant(2)
b = tf.constant(10)
c = tf.multiply(a, b)
sess = tf.Session()
print(sess.run(c))  # 20
y_hat = tf.constant(36, name='y_hat')            # Define y_hat constant. Set to 36.
y = tf.constant(39, name='y')                    # Define y. Set to 39.
loss = tf.Variable((y - y_hat)**2, name='loss')  # Create a variable for the loss

init = tf.global_variables_initializer()  # When init is run later (session.run(init)),
                                          # the loss variable will be initialized and ready to be computed
with tf.Session() as session:  # Create a session and print the output
    session.run(init)          # Initializes the variables
    print(session.run(loss))   # Prints the loss: (39 - 36)**2 = 9
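For contrast, the same computation under eager execution needs no session or initializer (a minimal sketch, assuming TensorFlow 2.x):

y_hat = tf.constant(36, name='y_hat')
y = tf.constant(39, name='y')
loss = tf.Variable((y - y_hat)**2, name='loss')
print(loss.numpy())  # 9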
sol0invictus / first.py
Created January 5, 2020 04:10
custom loss blog file
import tensorflow.keras.backend as kb

def custom_loss(y_actual, y_pred):
    # Element-wise squared error; Keras reduces it over the batch
    custom_loss = kb.square(y_actual - y_pred)
    return custom_loss
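One way to wire this loss into Keras training (the one-layer model and toy data here are hypothetical placeholders, not from the gist):

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=[1])])
model.compile(optimizer='adam', loss=custom_loss)
x = np.array([0., 1., 2., 3.], dtype=np.float32)
y = 2 * x  # fit y = 2x on toy data
model.fit(x, y, epochs=10, verbose=0)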
sol0invictus / second.py
Created January 5, 2020 04:04
loss_blog_2
import tensorflow as tf

class model:
    def __init__(self):
        xavier = tf.keras.initializers.GlorotUniform()
        self.l1 = tf.keras.layers.Dense(64, kernel_initializer=xavier,
                                        activation=tf.nn.relu, input_shape=[1])
        self.l2 = tf.keras.layers.Dense(64, kernel_initializer=xavier,
                                        activation=tf.nn.relu)
        self.out = tf.keras.layers.Dense(1, kernel_initializer=xavier)
        self.train_op = tf.keras.optimizers.Adagrad(learning_rate=0.1)

    # Running the model
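    # (Sketch, not from the gist: the preview ends here. One plausible
    # continuation, assuming a forward pass plus a GradientTape training
    # step; the method names run and train are assumptions.)
    def run(self, x):
        # Forward pass: chain the layers, input -> l1 -> l2 -> out
        return self.out(self.l2(self.l1(x)))

    def train(self, x, y):
        # One optimization step using the squared-error custom_loss above
        with tf.GradientTape() as tape:
            loss = custom_loss(y, self.run(x))
        variables = (self.l1.trainable_variables
                     + self.l2.trainable_variables
                     + self.out.trainable_variables)
        grads = tape.gradient(loss, variables)
        self.train_op.apply_gradients(zip(grads, variables))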