
@wehlutyk
Created July 31, 2018 14:23
diff --git a/nw2vec/ae.py b/nw2vec/ae.py
index 0488548..1fbb469 100644
--- a/nw2vec/ae.py
+++ b/nw2vec/ae.py
@@ -483,18 +483,19 @@ def build_p_builder(dims, use_bias=False):
         p_adj = layers.Bilinear(0, use_bias=use_bias,
                                 kernel_regularizer='l2', bias_regularizer='l2',
                                 name='p_adj')([p_layer1, p_layer1])
-        p_v_μ_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
-                                        kernel_regularizer='l2', bias_regularizer='l2',
-                                        name='p_v_mu_flat')(p_layer1)
-        p_v_logD_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
-                                           kernel_regularizer='l2', bias_regularizer='l2',
-                                           name='p_v_logD_flat')(p_layer1)
-        p_v_u_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
-                                        kernel_regularizer='l2', bias_regularizer='l2',
-                                        name='p_v_u_flat')(p_layer1)
-        p_v_μlogDu_flat = keras.layers.Concatenate(name='p_v_mulogDu_flat')(
-            [p_v_μ_flat, p_v_logD_flat, p_v_u_flat])
-        return ([p_adj, p_v_μlogDu_flat], ('SigmoidBernoulliAdjacency', 'Gaussian'))
+        # p_v_μ_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
+        #                                 kernel_regularizer='l2', bias_regularizer='l2',
+        #                                 name='p_v_mu_flat')(p_layer1)
+        # p_v_logD_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
+        #                                    kernel_regularizer='l2', bias_regularizer='l2',
+        #                                    name='p_v_logD_flat')(p_layer1)
+        # p_v_u_flat = keras.layers.Dense(dim_data, use_bias=use_bias,
+        #                                 kernel_regularizer='l2', bias_regularizer='l2',
+        #                                 name='p_v_u_flat')(p_layer1)
+        # p_v_μlogDu_flat = keras.layers.Concatenate(name='p_v_mulogDu_flat')(
+        #     [p_v_μ_flat, p_v_logD_flat, p_v_u_flat])
+        # return ([p_adj, p_v_μlogDu_flat], ('SigmoidBernoulliAdjacency', 'Gaussian'))
+        return ([p_adj], ('SigmoidBernoulliAdjacency',))
     return p_builder
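
After this change, p_builder keeps only the adjacency head: the Bilinear layer produces one logit per node pair, and the 'SigmoidBernoulliAdjacency' codec treats those logits as Bernoulli parameters, while the Gaussian feature-reconstruction head (μ, logD, u) is commented out rather than deleted. As a rough numpy sketch (not the project's code, and assuming layers.Bilinear computes a plain bilinear form z·W·zᵀ over the p_layer1 activations, which is an assumption):

import numpy as np

rng = np.random.default_rng(0)
n_nodes, dim_l1 = 5, 10
z = rng.normal(size=(n_nodes, dim_l1))     # stand-in for p_layer1 activations
W = rng.normal(size=(dim_l1, dim_l1))      # stand-in for the Bilinear kernel

adj_logits = z @ W @ z.T                   # one logit per (i, j) node pair
adj_probs = 1 / (1 + np.exp(-adj_logits))  # sigmoid -> predicted edge probabilities
print(adj_probs.shape)                     # (5, 5)
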
diff --git a/nw2vec/codecs.py b/nw2vec/codecs.py
index 9551357..7fe8c51 100644
--- a/nw2vec/codecs.py
+++ b/nw2vec/codecs.py
@@ -151,12 +151,14 @@ class SigmoidBernoulliAdjacency(Codec):
         # `adj` now has shape (batch or 1, sampling, batch, batch)
         assert len(adj.shape) == 4  # If this fails, change it to a dynamic check
-        # #38 showed that weighing links and non-links so that they each contribute 1/2
-        # gives a worse result on BlogCatalog than not weighing them flat. See
-        # https://github.com/ixxi-dante/nw2vec/commit/3bba9c8fdd5afd66a593a61bf8473b8f794e28fb
-        # for the change which led to this version.
-        weighted_sigmoid_cross_entropies = \
-            tf.nn.sigmoid_cross_entropy_with_logits(labels=adj, logits=self.logits)
+        # Temporary replacement for #43
+        density = K.mean(adj)
+        weighted_sigmoid_cross_entropies = (
+            .5
+            * tf.nn.weighted_cross_entropy_with_logits(
+                targets=adj, logits=self.logits, pos_weight=(1 / density) - 1)
+            / (1 - density)
+        )
         return - K.mean(K.sum(weighted_sigmoid_cross_entropies, axis=-1), axis=-1)
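
The replacement loss brings back the link/non-link balancing that the removed comment describes: with pos_weight = (1 / density) - 1 and the extra .5 / (1 - density) factor, each link is weighted .5 / density and each non-link .5 / (1 - density), so the two classes end up with equal total weight. A small numpy sketch of that bookkeeping (illustration only, not the project's code):

import numpy as np

rng = np.random.default_rng(0)
adj = (rng.random((100, 100)) < 0.05).astype(float)  # sparse 0/1 "adjacency"
density = adj.mean()

# Per-entry weights implied by the new loss: the outer .5 / (1 - density) factor,
# multiplied by pos_weight = (1 / density) - 1 on links and by 1 on non-links.
weights = np.where(adj == 1, .5 / density, .5 / (1 - density))

print(weights[adj == 1].sum())  # = adj.size / 2
print(weights[adj == 0].sum())  # = adj.size / 2

Both sums come out to half the number of adjacency entries, so links and non-links each contribute half of the weighted cross-entropy mass regardless of how sparse adj is.
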
diff --git a/projects/scale/blogcatalog.py b/projects/scale/blogcatalog.py
index bec7341..7153829 100644
--- a/projects/scale/blogcatalog.py
+++ b/projects/scale/blogcatalog.py
@@ -26,7 +26,8 @@ dim_l1, dim_ξ = 10, 10
 use_bias = False
 # Training
-loss_weights = [1.0, 1.0, 1.0]  # q, p_adj, p_v
+# loss_weights = [1.0, 1.0, 1.0]  # q, p_adj, p_v
+loss_weights = [1.0, 1.0]  # q, p_adj
 n_epochs = 10000
 # seeds_per_batch = len(nodes) -> defined below
 max_walk_length = 1
@@ -113,7 +114,7 @@ dim_data = len(groups)
 dims = (dim_data, dim_l1, dim_ξ)
 DATA_PARAMETERS = 'crop={crop}'.format(crop=crop)
 VAE_PARAMETERS = (
-    'no_adj_cross_entropy_weighing'
+    'no_feature_reconstruction'
     '-n_ξ_samples={n_ξ_samples}'
     '-dims={dims}'
     '-bias={use_bias}').format(n_ξ_samples=n_ξ_samples,
@@ -173,7 +174,7 @@ def target_func(batch_adj, required_nodes, final_nodes):
                                                        0, n_ξ_samples),
                                 0, 1
                                 ),
-        utils.expand_dims_tile(features[final_nodes], 1, n_ξ_samples),
+        # utils.expand_dims_tile(features[final_nodes], 1, n_ξ_samples),
     ]
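
The blogcatalog.py edits keep the training configuration in step with the two-output decoder from ae.py: with the p_v head gone, loss_weights drops its third entry and target_func no longer returns a features target, leaving only the adjacency target tiled to the 4-D shape that SigmoidBernoulliAdjacency asserts on. A minimal numpy sketch of that shape bookkeeping, assuming utils.expand_dims_tile(x, axis, reps) inserts an axis and tiles it reps times (an assumption about its semantics):

import numpy as np

def expand_dims_tile(arr, axis, reps):
    # Hypothetical stand-in for utils.expand_dims_tile: insert an axis, tile it reps times.
    return np.repeat(np.expand_dims(arr, axis), reps, axis=axis)

batch_adj = np.zeros((32, 32))   # a batch x batch adjacency block
n_ξ_samples = 5
adj_target = expand_dims_tile(expand_dims_tile(batch_adj, 0, n_ξ_samples), 0, 1)
print(adj_target.shape)          # (1, 5, 32, 32) -> (batch or 1, sampling, batch, batch)
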