@sbalnojan
Created June 7, 2019 14:55
import os, sys
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning"))  # add the keras-deep-graph-learning submodule to the module search path
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning/examples"))  # also add its examples directory, which provides the utils module
import numpy as np
from examples import utils
from keras.layers import Dense, Activation, Dropout
from keras.models import Model, Sequential
from keras.regularizers import l2
from keras.optimizers import Adam
from keras_dgl.layers import GraphCNN
import keras.backend as K
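# Load the Cora citation dataset: X holds the node (paper) features, A the
# (scipy sparse) citation adjacency matrix and Y the one-hot class labels.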
X, A, Y = utils.load_data(dataset='cora')
print("Just to check that this is indeed sparse, but not zero, check the column sums: ", sum(A.A))
y_train, y_val, y_test, idx_train, idx_val, idx_test, train_mask = utils.get_splits(Y)
A_norm = utils.preprocess_adj_numpy(A, True)
# For reference, this is what preprocess_adj_numpy does: add self-loops and
# symmetrically normalise the adjacency matrix, i.e. D^-1/2 (A + I) D^-1/2:
#
# adj = adj + np.eye(adj.shape[0])
# d = np.diag(np.power(np.array(adj.sum(1)), -0.5).flatten(), 0)
# a_norm = adj.dot(d).transpose().dot(d)
# return a_norm
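# A minimal sketch (toy example, not part of the data pipeline above): the same
# symmetric normalisation applied by hand to a 3-node path graph, so the
# D^-1/2 (A + I) D^-1/2 formula is easy to verify on paper.
toy_adj = np.array([[0., 1., 0.],
                    [1., 0., 1.],
                    [0., 1., 0.]])
toy_adj = toy_adj + np.eye(toy_adj.shape[0])           # add self-loops
toy_d = np.diag(np.power(toy_adj.sum(1), -0.5))        # D^-1/2 of the self-loop graph
toy_norm = toy_adj.dot(toy_d).transpose().dot(toy_d)   # symmetric normalisation
print("Toy normalised adjacency:\n", toy_norm)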
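# A minimal sketch of how the imported GraphCNN layer is typically wired up for
# Cora node classification, following the keras-deep-graph-learning examples.
# The filter construction (stacking A_norm with its square), the layer sizes
# and all hyperparameters below are assumptions for illustration only.
num_filters = 2
graph_conv_filters = np.concatenate([A_norm, np.matmul(A_norm, A_norm)], axis=0)
graph_conv_filters = K.constant(graph_conv_filters)

model = Sequential()
model.add(GraphCNN(16, num_filters, graph_conv_filters,
                   input_shape=(X.shape[1],), activation='elu',
                   kernel_regularizer=l2(5e-4)))
model.add(Dropout(0.2))
model.add(GraphCNN(y_train.shape[1], num_filters, graph_conv_filters,
                   activation='elu', kernel_regularizer=l2(5e-4)))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01),
              metrics=['acc'])

# train_mask zeroes out the loss on non-training nodes, which is why it is
# passed as sample_weight and the whole graph is treated as a single batch.
model.fit(X, y_train, sample_weight=train_mask,
          batch_size=A.shape[0], epochs=200, shuffle=False, verbose=1)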