
# numpy save and load:
import numpy as np

np.save('labels_from_segments.npy', labels_from_segments)
np.save('labels_from_model.npy', labels_from_model)
labels_from_segments = np.load('labels_from_segments.npy')
labels_from_model = np.load('labels_from_model.npy')

def saveHistory(history_dict, filename):
    # wrap the dict so np.save stores it inside a 0-d object array
    to_be_saved = {'S': history_dict}
    np.save(filename, to_be_saved)

def loadHistory(filename):
    # allow_pickle is needed to restore the wrapped dict (numpy >= 1.16)
    loaded = np.load(filename, allow_pickle=True)
    return loaded[()]['S']

hi = model.fit(...)
saveHistory(hi.history, 'tmp_saved_history.npy')
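Reading the history back is symmetric; a minimal usage sketch with the helpers above:

history_dict = loadHistory('tmp_saved_history.npy')
print(history_dict.keys())  # e.g. loss, val_loss, and any tracked metrics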
# resnet-152_keras.py: Resnet-152 pre-trained model in Keras 2.0 (forked from mvoelk/resnet-152_keras.py)
# -*- coding: utf-8 -*-
import numpy as np
import copy
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, AveragePooling2D, ZeroPadding2D, Flatten, Activation, add
from keras.optimizers import SGD
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras import initializers
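Only the header of the forked gist is shown above; the rest assembles the ResNet-152 graph from residual blocks. As a rough, illustrative sketch of how the imported layers combine in such a block (the helper name and filter sizes are mine, not the gist's):

def identity_block_sketch(input_tensor, filters):
    # Illustrative residual block: three conv/BN/ReLU stages plus a skip connection.
    f1, f2, f3 = filters
    x = Conv2D(f1, (1, 1))(input_tensor)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(f2, (3, 3), padding='same')(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(f3, (1, 1))(x)
    x = BatchNormalization()(x)
    x = add([x, input_tensor])  # requires input_tensor to already have f3 channels
    return Activation('relu')(x)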
# use inkscape to convert all *.svg into *.png on Windows
for /f "tokens=* delims=\n" %i in ('dir /b *.svg') do "C:\Program Files (x86)\Inkscape\inkscape.exe" --without-gui --file="%i" --export-png="%i.png" --export-background=white --export-dpi=300
# print the names
for /f "tokens=* delims=\n" %i in ('dir /b *.svg') do echo %i
# zip all files excluding .h5 files
zip -r FOLDER.zip FOLDER/* -x '*.h5'
# edge to images.py: scheme for generating GoogleStreetView image URLs from an edge described as a tuple (self.Start, self.End)
### Inside Segment object
def getGoogleViewUrls(self, PIXELS_X, PIXELS_Y, minimal_length):
    # distance_between_two_points() and floor() come from the rest of the project
    urls = []
    filenames = []
    min_allowed_distance = minimal_length
    d = 1000 * distance_between_two_points(self.Start, self.End)
    number_of_splits = int(max(floor(d / min_allowed_distance), 1.0))
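The gist cuts off here; presumably it goes on to interpolate number_of_splits points along the edge and build one request per point. A standalone sketch of that idea against the Street View Static API (the interpolation and parameter choices are my assumption, and YOUR_API_KEY is a placeholder):

def street_view_urls_sketch(start, end, number_of_splits, pixels_x=640, pixels_y=640):
    # start/end are (lat, lon) tuples; points are linearly interpolated between them
    urls = []
    for i in range(number_of_splits + 1):
        t = i / float(number_of_splits)
        lat = start[0] + t * (end[0] - start[0])
        lon = start[1] + t * (end[1] - start[1])
        urls.append("https://maps.googleapis.com/maps/api/streetview"
                    "?size={}x{}&location={},{}&key=YOUR_API_KEY".format(pixels_x, pixels_y, lat, lon))
    return urls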
# crop every *.jpg in place to the top-left 640x615 region with ImageMagick
for file in *.jpg; do convert -crop 640x615+0+0 "$file" "$file"; done
# tensorflow inside keras metric, with debugging tf variables
import tensorflow as tf
from keras import backend as K

def grouped_mse(k=3):
    def f(y_true, y_pred):
        group_by = tf.constant(k)
        real_size = tf.size(y_pred)
        remainder = tf.truncatemod(real_size, group_by)
        remainder = K.print_tensor(remainder, message="remainder is: ")
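The metric is cut off mid-definition above. In Keras a parameterised metric is just a factory returning f(y_true, y_pred), so one way it could be finished is sketched below; the reshape-into-groups-then-MSE logic is my guess at the intent (and assumes the tensor size divides evenly by k), not the original code:

from keras import backend as K

def grouped_mse_sketch(k=3):
    def f(y_true, y_pred):
        # average targets and predictions in consecutive groups of k, then MSE
        y_true_g = K.mean(K.reshape(y_true, (-1, k)), axis=1)
        y_pred_g = K.mean(K.reshape(y_pred, (-1, k)), axis=1)
        return K.mean(K.square(y_pred_g - y_true_g))
    return f

# plugged in like any other metric:
# model.compile(optimizer='adam', loss='mse', metrics=[grouped_mse_sketch(k=3)])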
a = "/home/ekmek/saliency_tools/models/TEST1.npy"
b = "/home/ekmek/saliency_tools/models/TEST2.npy"
aval = np.load(a)
bval = np.load(b)
# lengths
print len(aval)
print len(bval)
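Beyond the lengths, the values themselves can be checked with numpy's comparison helpers; a small follow-up sketch:

print(np.array_equal(aval, bval))   # exact element-wise match
print(np.allclose(aval, bval))      # match within floating-point tolerance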
def short_summary(model):
    import numpy as np
    from keras import backend as K
    for layer in model.layers:
        trainable_count = int(np.sum([K.count_params(p) for p in set(layer.trainable_weights)]))
        non_trainable_count = int(np.sum([K.count_params(p) for p in set(layer.non_trainable_weights)]))
        if trainable_count == 0 and non_trainable_count == 0:
            print('{:<10}[{:<10}]: {:<20} => {:<20}'.format(layer.name, layer.__class__.__name__, str(layer.input_shape), str(layer.output_shape)))
        else:
            print('{:<10}[{:<10}]: {:<20} => {:<20}, with {} trainable + {} nontrainable'.format(layer.name, layer.__class__.__name__, str(layer.input_shape), str(layer.output_shape), trainable_count, non_trainable_count))
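Usage sketch on a toy model (the layer sizes here are arbitrary, just to exercise the printout):

from keras.models import Sequential
from keras.layers import Dense
toy = Sequential([Dense(8, input_shape=(4,)), Dense(1)])
short_summary(toy)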