@BrianPugh · Last active September 12, 2017 19:29
optimistic restore
import tensorflow as tf


def optimistic_restore(session, save_file, variable_scope=''):
    '''
    A Caffe-style restore that loads in variables
    if they exist in both the checkpoint file and the current graph.
    Call this after running the global init op.

    By DanielGordon10 on December 27, 2016
    https://github.com/tensorflow/tensorflow/issues/312
    With RalphMao tweak.

    bpugh, July 21, 2017: Added a variable_scope so that a network can be
    loaded within a tf.variable_scope() and still have weights restored.
    '''
    reader = tf.train.NewCheckpointReader(save_file)
    saved_shapes = reader.get_variable_to_shape_map()
    if variable_scope == '':
        saved_shapes_scoped = saved_shapes
        offset = 0
    else:
        # Prefix checkpoint names with the scope so they match graph names.
        saved_shapes_scoped = [variable_scope + '/' + x for x in saved_shapes]
        offset = len(variable_scope) + 1
    var_names = []
    for var in tf.global_variables():
        search_term = var.name.split(':')[0]
        if search_term in saved_shapes_scoped:
            # Pair the (scoped) graph name with the checkpoint name;
            # the offset strips the scope prefix added above.
            var_names.append((search_term, search_term[offset:]))
    name2var = dict(zip(map(lambda x: x.name.split(':')[0],
                            tf.global_variables()),
                        tf.global_variables()))
    restore_variables = []
    with tf.variable_scope('', reuse=True):
        for var_name, saved_var_name in var_names:
            try:
                curr_var = name2var[var_name]
                var_shape = curr_var.get_shape().as_list()
                # Only restore variables whose shapes also match.
                if var_shape == saved_shapes[saved_var_name]:
                    found_variable = tf.get_variable(var_name)
                    restore_variables.append(
                        found_variable.assign(reader.get_tensor(saved_var_name)))
            except Exception:
                print("{} couldn't be loaded.".format(saved_var_name))
    session.run(restore_variables)
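

# Minimal usage sketch, assuming a TF1-style graph and a checkpoint at the
# hypothetical path 'model.ckpt' (neither is part of the gist itself). The
# variable is built inside a scope, so variable_scope='my_scope' maps the
# graph name 'my_scope/w' onto the checkpoint's 'w'.
if __name__ == '__main__':
    with tf.variable_scope('my_scope'):
        w = tf.get_variable('w', shape=[10, 10])
    with tf.Session() as sess:
        # Run the global init op first, as the docstring requires.
        sess.run(tf.global_variables_initializer())
        optimistic_restore(sess, 'model.ckpt', variable_scope='my_scope')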