Skip to content

Instantly share code, notes, and snippets.

@ispamm
ispamm / tf_add_multiply_standard.py
Last active March 16, 2018 14:13
Eager_tutorial_1
import tensorflow as tf

# Build a small graph: c = 1.5 * (a + 2.0).
a = tf.constant(3.0)
b = a + 2.0 # Add
c = 1.5 * b # Multiply

# Graph mode (TF 1.x): ops only produce concrete values inside a Session.
# NOTE(review): tf.Session was removed in TF 2.x; this snippet assumes TF 1.x.
sess = tf.Session()
with sess.as_default():
    # FIX: this line was not indented under `with`, a SyntaxError.
    print(sess.run(c)) # Print the value of c
# Eager-mode version of the same arithmetic: each op returns a concrete
# tf.Tensor immediately, no Session required.
a = tf.constant(3.0)
b = tf.add(a, 2.0)       # equivalent to a + 2.0
c = tf.multiply(1.5, b)  # equivalent to 1.5 * b
print(b) # tf.Tensor(5.0, shape=(), dtype=float32)
print(c) # tf.Tensor(7.5, shape=(), dtype=float32)
import tensorflow as tf
import tensorflow.contrib.eager as tfe
import numpy as np  # FIX: np is used below but was never imported (NameError)

# Switch to eager execution; must be called once, at program startup.
tf.enable_eager_execution()

# Create a variable
W = tfe.Variable(0.5, name='w')

# Print the value (eager variables show their contents directly)
print(W) # Prints: <tf.Variable 'w:0' shape=() dtype=float32, numpy=0.5>

# Add a NumPy array and print: the scalar broadcasts over the array and
# the result is an eager tf.Tensor.
print(W + np.asarray([1, 3])) # Prints: tf.Tensor([1.5 3.5], shape=(2,), dtype=float32)
# Get the value in NumPy form
# NOTE(review): the comment above looks orphaned — it likely belonged to a
# `W.numpy()` call lost from this paste; confirm against the original gist.
def square_f(W):
    """Return a tensor with each element of W squared."""
    # FIX: the body was not indented in the paste, a SyntaxError.
    # Return a tensor with elements squared
    return tf.square(W)
# Build a function returning d(square_f)/dW evaluated at its argument;
# params=['W'] names the parameter to differentiate with respect to.
f_grad = tfe.gradients_function(square_f, params=['W'])
# d(W^2)/dW at 0.3 is 2 * 0.3 = 0.6.
print(f_grad(tf.constant(0.3)))
# Prints [<tf.Tensor: id=xx, shape=(), dtype=float32, numpy=0.6>]
# Second-order derivative
# NOTE(review): f_grad returns a *list* of tensors; passing it directly to
# gradients_function may need a wrapper like `lambda x: f_grad(x)[0]` — confirm.
f_gg = tfe.gradients_function(f_grad)
f_gg(1.0)
# Print: [<tf.Tensor: id=57, shape=(), dtype=float32, numpy=2.0>]
# Model parameters as eager variables.
w = tfe.Variable(0.3, name='w')
# FIX: use 0.0 (not 0) so b is float32 like w — TensorFlow does not
# auto-promote int32 to float32, so `w * x + b` would raise a dtype error.
b = tfe.Variable(0.0, name='b')

def logreg_model(x, w, b):
    """Linear model w * x + b (the logits of a logistic regression)."""
    # FIX: the body was not indented in the paste, a SyntaxError.
    return w * x + b
# Layer objects are created once and reused on every forward call.
hid = tf.layers.Dense(units=10, activation=tf.nn.relu)
drop = tf.layers.Dropout()
out = tf.layers.Dense(units=3, activation=None)

def nn_model(x, training=False):
    """Forward pass: Dense(10, relu) -> Dropout -> Dense(3, linear).

    Dropout is only active when training=True.
    """
    # FIX: the body was not indented in the paste, a SyntaxError.
    return out(drop(hid(x), training=training))
class SingleHiddenLayerNetwork(tf.keras.Model):
    """One-hidden-layer network: Dense(10, tanh) -> Dense(3, linear).

    FIX: the pasted class body had lost all indentation (SyntaxError);
    restored here with the standard 4-space layout.
    """

    def __init__(self):
        super(SingleHiddenLayerNetwork, self).__init__()
        self.hidden_layer = tf.layers.Dense(10, activation=tf.nn.tanh, use_bias=True)
        self.output_layer = tf.layers.Dense(3, use_bias=True, activation=None)

    def call(self, x):
        # Forward-pass logic
        return self.output_layer(self.hidden_layer(x))
# Initialize the network
net = SingleHiddenLayerNetwork()
# Get all variables. Keras layers build their weights lazily, so the model
# owns no variables before the first call.
# FIX: a bare `len(...)` statement prints nothing in a script, contradicting
# the original "# Print: 0" comment — wrapped in print().
print(len(net.variables)) # Print: 0
# Make some prediction (this first call builds the layer weights)
net(tf.constant(2.0, shape=(1,1)))
# Get all variables (again)