Skip to content

Instantly share code, notes, and snippets.

@siddMahen
Created July 30, 2016 12:05
Show Gist options
  • Save siddMahen/6d730e1b3d331c4be34c9a57b0a39ccf to your computer and use it in GitHub Desktop.
Doubtful it stood;
As two spent swimmers, that do cling together
And choke their art. The merciless Macdonwald--
Worthy to be a rebel, for to that
The multiplying villanies of nature
Do swarm upon him--from the western isles
Of kerns and gallowglasses is supplied;
And fortune, on his damned quarrel smiling,
Show'd like a rebel's whore: but all's too weak:
For brave Macbeth--well he deserves that name--
Disdaining fortune, with his brandish'd steel,
Which smoked with bloody execution,
Like valour's minion carved out his passage
Till he faced the slave;
Which ne'er shook hands, nor bade farewell to him,
Till he unseam'd him from the nave to the chaps,
And fix'd his head upon our battlements.
Was the hope drunk
Wherein you dress'd yourself? hath it slept since?
And wakes it now, to look so green and pale
At what it did so freely? From this time
Such I account thy love. Art thou afeard
To be the same in thine own act and valour
As thou art in desire? Wouldst thou have that
Which thou esteem'st the ornament of life,
And live a coward in thine own esteem,
Letting 'I dare not' wait upon 'I would,'
Like the poor cat i' the adage?
import tensorflow as tf
import sys
import os
def read_and_decode(filename_queue):
    """Dequeue one file from *filename_queue* and return a single raw text line.

    Args:
        filename_queue: queue of filename strings, as produced by
            tf.train.string_input_producer.

    Returns:
        A scalar string tensor holding the next line read (the reader key
        is discarded).
    """
    line_reader = tf.TextLineReader()
    _, line = line_reader.read(filename_queue)
    return line
def inputs(filenames, batch_size, num_epochs):
    """Build the input pipeline: shuffled batches of raw text lines.

    Args:
        filenames: list of text-file paths to read.
        batch_size: number of lines per emitted batch.
        num_epochs: how many passes to make over the files before the
            queue raises OutOfRangeError.

    Returns:
        A string tensor of shape [batch_size] containing shuffled lines.
    """
    with tf.name_scope('input'):
        queue = tf.train.string_input_producer(
            filenames, num_epochs=num_epochs, shuffle=True)
        single_line = read_and_decode(queue)
        # Queue sizing: keep at least 10 elements buffered so the shuffle
        # has something to draw from; capacity leaves room for 3 batches.
        min_after_dequeue = 10
        capacity = min_after_dequeue + 3 * batch_size
        batch = tf.train.shuffle_batch(
            [single_line], batch_size=batch_size,
            capacity=capacity, min_after_dequeue=min_after_dequeue)
    return batch
def train(run_name, filenames):
    """Drive the input pipeline, printing each line and checkpointing per step.

    Checkpoints are written to ./model-<run_name>-<step>. If a checkpoint
    for the same run name already exists in the current directory, training
    resumes from its recorded step.

    Args:
        run_name: label embedded in checkpoint filenames.
        filenames: list of text files fed to the input pipeline.
    """
    with tf.Graph().as_default():
        lines = inputs(filenames, batch_size=5, num_epochs=1)
        # string_input_producer(num_epochs=...) creates a *local* epoch
        # counter; tf.initialize_all_variables() covers only globals, so
        # locals must be initialized too or the queue runner dies with
        # FailedPreconditionError.
        init_op = tf.group(tf.initialize_all_variables(),
                           tf.initialize_local_variables())
        sess = tf.Session()
        saver = tf.train.Saver()
        prev_step = 0
        ckpt = tf.train.get_checkpoint_state('.')
        if ckpt and ckpt.model_checkpoint_path:
            # Checkpoint files are named "model-<run_name>-<step>". Use
            # rsplit + a prefix match so run names containing '-' neither
            # mis-parse nor crash int().
            basename = os.path.basename(ckpt.model_checkpoint_path)
            name_part, sep, step_part = basename.rpartition('-')
            if sep and name_part == 'model-' + run_name and step_part.isdigit():
                prev_step = int(step_part)
                saver.restore(sess, ckpt.model_checkpoint_path)
                # restore() only repopulates saved (global) variables; the
                # local epoch counter still needs explicit initialization.
                sess.run(tf.initialize_local_variables())
            else:
                # Checkpoint belongs to a different run: start fresh.
                sess.run(init_op)
        else:
            sess.run(init_op)
        coord = tf.train.Coordinator()
        ckpt_path = os.path.join('.', "model-" + run_name)
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        try:
            step = prev_step
            while not coord.should_stop():
                batch = sess.run(lines)
                for line in batch:
                    print(line)
                # Checkpoint every step so an interrupted run can resume.
                save_path = saver.save(sess, ckpt_path, global_step=step)
                print('Model saved to %s' % save_path)
                step += 1
        except tf.errors.OutOfRangeError:
            # Raised by the queue once num_epochs passes are exhausted.
            print("Done training!")
            save_path = saver.save(sess, ckpt_path, global_step=step)
            print('Model saved to %s' % save_path)
        finally:
            coord.request_stop()
            coord.join(threads)
            sess.close()
if __name__ == '__main__':
    # Usage: python train.py model_name input1.txt input2.txt ...
    # Guard against missing arguments, which previously raised a bare
    # IndexError instead of a usage message.
    if len(sys.argv) < 3:
        sys.exit("Usage: python train.py model_name input1.txt [input2.txt ...]")
    run_name = sys.argv[1]
    filenames = sys.argv[2:]
    train(run_name, filenames)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment