Sven Balnojan (sbalnojan)

sbalnojan / mini_fire_s3fs_cli.py
Last active May 22, 2019 12:04
Mini CLI to copy a local tree to S3
import s3fs
import fire
import os


class S3CopyMachine(object):
    """Copy to S3 via s3fs."""

    def to_s3(self, local_bucket, s3_bucket):
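The preview is cut off inside to_s3. A minimal sketch of how the full CLI could look, assuming the method recursively uploads the local directory with s3fs's put and the class is exposed through fire.Fire; the upload call and the entry point are assumptions, not the original gist body:

import s3fs
import fire


class S3CopyMachine(object):
    """Copy a local directory tree to S3 via s3fs."""

    def to_s3(self, local_bucket, s3_bucket):
        # Assumption: recursively upload everything under local_bucket
        # to the given S3 bucket/prefix.
        s3 = s3fs.S3FileSystem(anon=False)
        s3.put(local_bucket, s3_bucket, recursive=True)
        return "copied {} to {}".format(local_bucket, s3_bucket)


if __name__ == "__main__":
    fire.Fire(S3CopyMachine)

It could then be invoked like: python mini_fire_s3fs_cli.py to_s3 ./data my-bucket/data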
sbalnojan / fit_dump_model_to_s3fs.py
Created May 22, 2019 12:02
Fit model, dump to S3 via s3fs
import s3fs
import pickle
import json
import numpy as np
BUCKET_NAME = "my-bucket"
# definitions, keras/tf/... imports...
if __name__ == "__main__":
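The main block here is truncated in the preview. A hedged sketch of the pattern the description names (fit something, then pickle it straight to S3 through an s3fs file handle); the stand-in model and the object key are assumptions:

import s3fs
import pickle
import numpy as np

BUCKET_NAME = "my-bucket"

if __name__ == "__main__":
    # Stand-in for the elided model definition and fit
    # (the gist's comment hints at keras/tf imports here).
    model = {"weights": np.random.rand(3, 3)}

    s3 = s3fs.S3FileSystem(anon=False)
    # Pickle the fitted model straight into an S3 object; the key name
    # below is an assumption, not taken from the gist.
    with s3.open(BUCKET_NAME + "/models/model.pkl", "wb") as f:
        pickle.dump(model, f)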
sbalnojan / dump_df_to_s3fs.py
Created May 22, 2019 12:02
Dump a DataFrame to S3 via s3fs
if __name__ == "__main__":
    # ...
    # your data preprocessing...
    s3 = s3fs.S3FileSystem(anon=False)
    df = pd.DataFrame(data={"foo": [0]})
    print(df)
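The preview stops before the actual write. One way the dump could finish, sketched with an assumed bucket/key and CSV as the output format:

import s3fs
import pandas as pd

if __name__ == "__main__":
    s3 = s3fs.S3FileSystem(anon=False)
    df = pd.DataFrame(data={"foo": [0]})

    # Write the frame through an s3fs file handle; the bucket, key and
    # CSV format below are assumptions, not taken from the gist.
    with s3.open("my-bucket/dataframes/foo.csv", "w") as f:
        df.to_csv(f, index=False)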
import os, sys
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning"))  # add the submodule to the module search path
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning/examples"))
import numpy as np
from keras.layers import Dense, Activation, Dropout
from keras.models import Model, Sequential
from keras.regularizers import l2
from keras.optimizers import Adam
from keras_dgl.layers import GraphCNN
import keras.backend as K

# A (adjacency matrix), X (node features) and Y (one-hot labels) are assumed
# to be defined earlier in the gist.
train_on_weight = np.array([1, 1, 0])
print("Now we won't do any fancy preprocessing, just basic training.")

NUM_FILTERS = 1
graph_conv_filters = A  # you may try np.eye(3)
graph_conv_filters = K.constant(graph_conv_filters)

model = Sequential()
model.add(GraphCNN(Y.shape[1], NUM_FILTERS, graph_conv_filters,
                   input_shape=(X.shape[1],),
                   activation='elu', kernel_regularizer=l2(5e-4)))
model.add(Activation('softmax'))
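The snippet ends after the softmax layer. A hedged continuation of it, showing how such a model is usually compiled and trained full-batch on the graph; the epoch count and the use of train_on_weight as sample weights are assumptions:

model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01), metrics=['acc'])
# Full-batch training on the whole graph; the node mask goes in as sample weights.
model.fit(X, Y, sample_weight=train_on_weight,
          batch_size=A.shape[0], epochs=200, shuffle=False)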
import os, sys
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning"))  # add the submodule to the module search path
sys.path.append(os.path.join(os.getcwd(), "keras-deep-graph-learning/examples"))
import numpy as np
from examples import utils
from keras.layers import Dense, Activation, Dropout
from keras.models import Model, Sequential
from keras.regularizers import l2
from keras.optimizers import Adam
from keras_dgl.layers import GraphCNN
import keras.backend as K  # needed for K.constant below

# A_norm (normalized adjacency), X (node features) and Y (one-hot labels) are
# assumed to be loaded earlier, e.g. via the submodule's utils.

# Filter 1: the identity matrix. This ignores the connected edges entirely.
print("this simply ignores the connected edges, thus gets a pretty bad test_acc:")
graph_conv_filters = np.eye(A_norm.shape[0])
graph_conv_filters = K.constant(graph_conv_filters)
num_filters = 1
model = Sequential()
model.add(GraphCNN(Y.shape[1], num_filters, graph_conv_filters,
                   input_shape=(X.shape[1],), activation='elu',
                   kernel_regularizer=l2(5e-4)))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01), metrics=['acc'])

# Filter 2: the normalized adjacency. This includes the edges.
print("this filter includes the edges, so it should perform considerably better than before:")
graph_conv_filters = A_norm
graph_conv_filters = K.constant(graph_conv_filters)
num_filters = 1
model = Sequential()
model.add(GraphCNN(Y.shape[1], num_filters, graph_conv_filters,
                   input_shape=(X.shape[1],), activation='elu',
                   kernel_regularizer=l2(5e-4)))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01), metrics=['acc'])

# Filter 3: stack the one-hop and two-hop filters (A_norm and A_norm squared)
# and use two filters in the GraphCNN layer.
graph_conv_filters = np.concatenate([A_norm, np.matmul(A_norm, A_norm)], axis=0)
graph_conv_filters = K.constant(graph_conv_filters)
num_filters = 2
model = Sequential()
model.add(GraphCNN(Y.shape[1], num_filters, graph_conv_filters,
                   input_shape=(X.shape[1],), activation='elu',
                   kernel_regularizer=l2(5e-4)))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.01), metrics=['acc'])
model.summary()
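Each filter variant above is only compiled; the training and evaluation step is not shown in the preview. A hedged sketch of how one of these models is typically trained and scored full-batch, assuming 0/1 node masks (train_mask, test_mask), an epoch count and verbosity that are not in the original:

# Assumed: train_mask and test_mask are 0/1 arrays over the graph's nodes.
model.fit(X, Y, sample_weight=train_mask,
          batch_size=A_norm.shape[0], epochs=200, shuffle=False, verbose=0)
_, test_acc = model.evaluate(X, Y, sample_weight=test_mask,
                             batch_size=A_norm.shape[0], verbose=0)
print("test_acc:", test_acc)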
import os
import random


def convert_path(fname):
    basename, _ = os.path.splitext(fname)
    out_dir = basename + '_partitioned'
    return out_dir


def random_split_file(fpath):
    root = os.path.dirname(fpath)
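random_split_file is cut off after its first line. A hedged guess at how it could continue: shuffle the input lines and write a train/test split next to the source file; the 90/10 ratio, the train_frac parameter and the output file names are assumptions, not the original code:

def random_split_file(fpath, train_frac=0.9):
    root = os.path.dirname(fpath)
    # Read all lines, shuffle them, and split into train/test files.
    with open(fpath, "rt") as f:
        lines = f.readlines()
    random.shuffle(lines)
    split = int(len(lines) * train_frac)
    with open(os.path.join(root, "train.txt"), "wt") as f:
        f.writelines(lines[:split])
    with open(os.path.join(root, "test.txt"), "wt") as f:
        f.writelines(lines[split:])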