A dumb way of handling variable scope
import tensorflow as tf
import tensorflow.contrib.slim as slim

dim_hidden = 6
dim_mapping = 6

def mapping(feat, is_training=True, reuse=False):
    batch_norm_params = {'is_training': is_training, 'decay': 0.9, 'updates_collections': None}
    with tf.variable_scope("mapping") as scope:
        if reuse:
            # second and later calls share the variables created by the first call
            scope.reuse_variables()
        net = slim.fully_connected(feat, dim_hidden
                                   , activation_fn=tf.nn.tanh  # tf.nn.sigmoid
                                   , weights_initializer=tf.truncated_normal_initializer(stddev=0.01)
                                   , normalizer_fn=None  # slim.batch_norm
                                   , normalizer_params=None  # batch_norm_params
                                   , scope='m_fc1')
        net = slim.dropout(net, keep_prob=0.8, is_training=is_training, scope='m_dr')
        net = slim.fully_connected(net, dim_mapping
                                   , activation_fn=None  # tf.nn.tanh
                                   , normalizer_fn=None
                                   , scope='m_fc2')
    return net
def recon(encode, is_training=True, reuse=False):
    batch_norm_params = {'is_training': is_training, 'decay': 0.9, 'updates_collections': None}
    with tf.variable_scope("recon") as scope:
        if reuse:
            scope.reuse_variables()
        net = slim.fully_connected(encode, dim_hidden
                                   , activation_fn=tf.nn.tanh
                                   , weights_initializer=tf.truncated_normal_initializer(stddev=0.01)
                                   , normalizer_fn=slim.batch_norm
                                   , normalizer_params=batch_norm_params
                                   , scope='r_fc1')
        net = slim.dropout(net, keep_prob=0.8, is_training=is_training, scope='r_dr')
        net = slim.fully_connected(net, 6
                                   , activation_fn=None
                                   , normalizer_fn=None
                                   , scope='r_fc2')
    return net
# DEFINE PLACEHOLDERS
nzfeat1 = tf.placeholder(tf.float32, [None, 6])  # first feature of each pair
nzfeat2 = tf.placeholder(tf.float32, [None, 6])  # second feature of each pair
sameind = tf.placeholder(tf.float32, [None, 1])  # 1 if a pair matches, 0 otherwise
weight = tf.placeholder(tf.float32, [None, 1])   # per-pair loss weight
is_train = tf.placeholder(tf.bool)
# DEFINE MAPPING (reuse=False on the first call creates the variables,
# reuse=True on the second call shares them)
map1 = mapping(nzfeat1, is_train, False)
map2 = mapping(nzfeat2, is_train, True)
rout1 = recon(map1, is_train, False)
rout2 = recon(map2, is_train, True)
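# The manual reuse flag above is the "dumb" part the title refers to: the caller
# has to remember which call is the first. A sketch of the alternative (assumes
# TF 1.4+, where tf.AUTO_REUSE was added; not part of the original gist):
#
#   with tf.variable_scope("mapping", reuse=tf.AUTO_REUSE):
#       ...
#
# With AUTO_REUSE, the first call creates the variables and every later call
# silently reuses them, so the reuse argument can be dropped from mapping()
# and recon() entirely.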
# WEIGHTS
g_weights = tf.global_variables()     # includes batch-norm moving averages
t_weights = tf.trainable_variables()  # creation order: m_fc1/w, m_fc1/b, m_fc2/w, m_fc2/b, ...
# DEFINE LOSS
hinge_max = 1.0  # margin for the negative-pair hinge
with tf.name_scope("LOSS_TOTAL"):
    l2diff = tf.pow(map1 - map2, 2)
    l1diff = tf.abs(map1 - map2)
    # contrastive metric loss: pull matching pairs together (weighted squared L2) ...
    loss_metric_pos = 1.0 * tf.reduce_mean(
        sameind * weight * l2diff
    )
    # ... and push non-matching pairs at least hinge_max apart (squared hinge on |.|)
    loss_metric_neg = 1.0 * tf.reduce_mean(
        tf.square(
            tf.maximum(
                hinge_max * tf.ones_like(l1diff) - (1 - sameind) * weight * l1diff,
                tf.zeros_like(l2diff)
            )
        )
    )
    # small reconstruction term keeps the learned mapping (nearly) invertible
    loss_recon = 0.000001 * (tf.reduce_mean(tf.pow(nzfeat1 - rout1, 2))
                             + tf.reduce_mean(tf.pow(nzfeat2 - rout2, 2)))
    # penalize the mapping weights for drifting away from identity
    # (note: as written, only the first of the two terms is scaled by 0.01)
    loss_weight = 0.01 * tf.nn.l2_loss(tf.eye(6) - t_weights[0]) \
                  + tf.nn.l2_loss(tf.eye(6) - t_weights[2])
    # weight decay, currently disabled by the 0.0 coefficient
    l2_loss = 0.00000 * tf.add_n([tf.nn.l2_loss(v)
                                  for v in tf.trainable_variables()])
    loss_total = loss_metric_pos + loss_metric_neg + loss_recon + loss_weight + l2_loss
# ADDITIONAL WEIGHT INITIALIZER
# set the two mapping layers to identity weights and zero biases
# (relies on the creation order of t_weights noted above)
init_w = []
init_w.append(t_weights[0].assign(tf.eye(6, 6)))
init_w.append(t_weights[1].assign(tf.zeros((6))))
init_w.append(t_weights[2].assign(tf.eye(6, 6)))
init_w.append(t_weights[3].assign(tf.zeros((6))))
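# A sketch of how these assign ops would be used (hypothetical session code,
# not in the original gist): run the standard initializer first, then
# overwrite the mapping layers so training starts from an identity mapping:
#
#   sess.run(init)    # 'init' is defined at the bottom of this file
#   sess.run(init_w)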
# JITTER WEIGHTS
jitter_std = 0.0  # 0.0 makes these assign ops a no-op; raise it to actually perturb
jitter = []
for i in range(len(g_weights)):
    jitter.append(g_weights[i].assign(
        g_weights[i] + tf.random_normal(tf.shape(g_weights[i]), stddev=jitter_std)))
# OPTIMIZER
# optm = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss_total)
optm = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss_total)
# INITIALIZER
init = tf.global_variables_initializer()
print("MODEL DEFINED")
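For completeness, a minimal training-step sketch under assumed inputs (x1 and x2 are [batch, 6] feature pairs, s is the [batch, 1] same-pair indicator, w the [batch, 1] pair weights; all four names are hypothetical, not part of the gist):

sess = tf.Session()
sess.run(init)    # standard variable init
sess.run(init_w)  # then force the mapping layers to identity
for step in range(1000):
    _, loss_val = sess.run([optm, loss_total],
                           feed_dict={nzfeat1: x1, nzfeat2: x2,
                                      sameind: s, weight: w, is_train: True})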