Skip to content

Instantly share code, notes, and snippets.

@cihat645
cihat645 / LinearRegression
Last active May 20, 2017 21:32
A simple function that calculates linear regression lines in Swift
//
// main.swift
// LinearRegression
//
// Created by Thomas Ciha on 5/20/17.
// Copyright © 2017 Thomas Ciha. All rights reserved.
//
import Foundation
@cihat645
cihat645 / Offer
Last active August 30, 2018 13:43
// Constructs an Offer by copying each argument into the corresponding member.
// NOTE(review): member declarations and the meaning of each field (e.g. units
// of Price, semantics of OfferTerm) live elsewhere in the class — not visible
// in this excerpt; confirm against the class definition.
Offer(float p, int q, milliseconds m, Term t, OrdType ot, bool b, int oID){
    // Identity first, then price/size, then remaining order metadata.
    OrderID = oID;
    Price = p;
    Qty = q;
    timestamp = m;
    OfferTerm = t;
    OrderType = ot;
    my_offer = b; // presumably flags an offer we placed ourselves — confirm
}
// Constructs an order_info record from its raw fields.
// NOTE(review): member declarations and the Status type are outside this
// excerpt; field meanings below are inferred from names — confirm.
order_info(float f, int q, Status s, int oID, bool flag){
    // Identity, then fill state.
    order_ID = oID;
    avg_price = f;
    shares_filled = q;
    offer_status = s;
    incomplete_market_order = flag; // presumably marks a partially-filled market order — confirm
}
# NOTE(review): gist excerpt — indentation was lost in the paste and the
# function body is truncated here (the visible code never returns `nns`).
def create_five_nns(input_size, hidden_size, act = None):
"""
Create five baseline neural networks used to measure the influence that
model depth & width have on performance.
:param input_size: input layer size
:param hidden_size: list of hidden layer sizes
:param act: activation function to use for each layer; defaults to 'relu'
:return: list of model_info hash tables
"""
act = ['relu'] if not act else [act] # default activation = 'relu'; note any falsy act value (not just None) is replaced
nns = [] # accumulator: one model-info hash table per network
# NOTE(review): gist excerpt — indentation was lost and the function appears
# truncated (no evaluation/return is visible). `build_nn`, `EarlyStopping`,
# `TensorBoard` and `ModelCheckpoint` are defined/imported elsewhere.
def quick_nn_test(model_info, data_dict, save_path):
"""
Build the network described by `model_info` and fit it on the data in
`data_dict`, writing checkpoints and TensorBoard logs under `save_path`.
:param model_info: hash table of model parameters; 'Name' and 'Batch size' keys are read here
:param data_dict: holds 'Training data' and 'Training labels'
:param save_path: base directory for checkpoints and TensorBoard logs
"""
model = build_nn(model_info) # use model info to build and compile a nn
stop = EarlyStopping(patience=5, monitor='acc', verbose=1) # stop training once training accuracy has not improved for 5 consecutive epochs
tensorboard_path =save_path + model_info['Name'] # create path for tensorboard callback
tensorboard = TensorBoard(log_dir=tensorboard_path, histogram_freq=0, write_graph=True, write_images=True) # create tensorboard callback
save_model = ModelCheckpoint(filepath= save_path + model_info['Name'] + '\\' + model_info['Name'] + '_saved_' + '.h5') # save model after every epoch; NOTE(review): '\\' is a Windows-only separator — consider os.path.join
model.fit(data_dict['Training data'], data_dict['Training labels'], epochs=150, # fit for up to 150 epochs (early stopping may end training sooner)
batch_size=model_info['Batch size'], callbacks=[save_model, stop, tensorboard])
"""This section of code allows us to create and test many neural networks and save the results of a quick
test into a CSV file. Once that CSV file has been created, we will continue to add results onto the existing
file."""
rapid_testing_path = 'YOUR PATH HERE'
data_path = 'YOUR DATA PATH'
try: # try to load existing csv
rapid_mlp_results = pd.read_csv(rapid_testing_path + 'Results.csv')
index = rapid_mlp_results.shape[1]
@cihat645
cihat645 / gen_rand_model.py
Last active September 22, 2018 15:41
generate randomized nn
# NOTE(review): gist excerpt — indentation was lost in the paste and the
# function is truncated here (model_info is built but never returned in view).
def generate_random_model():
"""
Randomly draw hyperparameters (optimizer, activation, batch size, depth,
width) for a neural network and collect them in a model_info hash table.
:return: not visible in this excerpt — presumably the model_info dict
"""
optimization_methods = ['adagrad', 'rmsprop', 'adadelta', 'adam', 'adamax', 'nadam'] # possible optimization methods
activation_functions = ['sigmoid', 'relu', 'tanh'] # possible activation functions
batch_sizes = [16, 32, 64, 128, 256, 512] # possible batch sizes
range_hidden_units = range(5, 250) # range of possible hidden units
model_info = {} # create hash table of sampled hyperparameters
same_units = np.random.choice([0, 1], p=[1/5, 4/5]) # 80% chance all hidden layers share one unit count
same_act_fun = np.random.choice([0, 1], p=[1/10, 9/10]) # 90% chance all hidden layers share one activation function
really_deep = np.random.rand() # uniform draw deciding whether to allow a very deep architecture
range_layers = range(1, 10) if really_deep < 0.8 else range(6, 20) # 80% of time constrain number of hidden layers between 1 - 10, 20% of time permit really deep architectures
def build_nn(model_info):
"""
This function builds and compiles a NN given a hash table of the model's parameters.
:param model_info:
:return:
"""
try:
if model_info["Regularization"] == "l2": # if we're using L2 regularization