import tensorflow as tf
import numpy
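# Note: this uses the TensorFlow 1.x graph-mode API (tf.placeholder, tf.Session);
# under TensorFlow 2.x it runs via tf.compat.v1 with v2 behavior disabled.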
# Parameters
learning_rate = 0.01
training_epochs = 500
datapoints_count = 100
# Training Data
train_X = []
train_Y = []
for i in range(datapoints_count):
    train_X.append(i)
    train_Y.append(5 * i + 8.0)  # points on the line y = 5x + 8
train_X = numpy.asarray(train_X)
train_Y = numpy.asarray(train_Y)
n_samples = train_X.shape[0]
# tf Graph Input
X = tf.placeholder("float", name="X")
Y = tf.placeholder("float", name="Y")
# Set model weights
W = tf.Variable(0.0, name="weight")
tf.summary.scalar('W', W)
b = tf.Variable(0.0, name="bias")
tf.summary.scalar('b', b)
# Construct a linear model
y_pred = X * W + b
# Mean squared error
cost = tf.div(tf.reduce_sum(tf.square(y_pred-Y)), (2*n_samples), name="cost")
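# (Dividing by 2*n_samples gives the halved mean squared error; the extra 1/2
#  only simplifies the gradient and does not change the minimizing W and b.)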
# Gradient descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
saver = tf.train.Saver()
# Start training
with tf.Session() as sess:
    merged = tf.summary.merge_all()
    train_writer = tf.summary.FileWriter('./train', sess.graph)
    sess.run(tf.global_variables_initializer())
    # Fit all training data
    for epoch in range(training_epochs):
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})
        training_cost, summary = sess.run([cost, merged], feed_dict={X: train_X, Y: train_Y})
        train_writer.add_summary(summary, epoch)
        # Display logs every 50 epochs
        if (epoch + 1) % 50 == 0:
            print("Epoch: %04d cost=%.9f W=%f b=%f" %
                  ((epoch + 1), training_cost, sess.run(W), sess.run(b)))
    print("Training cost=%.9f W=%f b=%f" %
          (training_cost, sess.run(W), sess.run(b)))
    saved_path = saver.save(sess, './saved_variable')
    print('model saved in {}'.format(saved_path))
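# The W, b, and cost scalars logged above can be viewed with the standard
# TensorBoard CLI pointed at the summary directory:
#   tensorboard --logdir ./train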
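# A minimal sketch of loading the checkpoint back, assuming the graph above
# (W, b, and saver) is already built in the process; saver.restore is the
# standard tf.train.Saver API and overwrites the variables with saved values.
with tf.Session() as sess:
    saver.restore(sess, './saved_variable')
    print("Restored W=%f b=%f" % (sess.run(W), sess.run(b)))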