ADGEfficiency / mlp.py
Last active January 19, 2018 10:51
Feedforward neural network aka multilayer perceptron in pure numpy
"""
A feedforward neural network aka multilayer perceptron.
Single hidden layer with relu as the activation function
after the input & hidden layers.
Linear activation function after the output layer.
Built using 100% pure numpy.
Adam Green - adgefficiency.com
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 25 10:12:34 2017
@author: Adam Green
adam.green@adgefficiency.com
adgefficiency.com
"""
import os
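The preview cuts off after the module docstring and the first import. As a hedged sketch only (the function, weight names and shapes below are my own illustration, not the gist's code), the forward pass the docstring describes, a relu hidden layer followed by a linear output, can be written in pure numpy as:

import numpy as np

def forward(x, w0, b0, w1, b1):
    """Forward pass: input -> relu hidden layer -> linear output."""
    hidden = np.maximum(0, x @ w0 + b0)  # relu after the hidden layer's linear transform
    return hidden @ w1 + b1              # linear activation at the output layer

# hypothetical shapes: 4 input features, 8 hidden units, 1 output
w0, b0 = np.random.normal(size=(4, 8)), np.zeros(8)
w1, b1 = np.random.normal(size=(8, 1)), np.zeros(1)

prediction = forward(np.random.normal(size=(16, 4)), w0, b0, w1, b1)  # batch of 16 samples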
ADGEfficiency / .vimrc
Last active April 29, 2018 19:44
My personal .vimrc
" ENCODING
set encoding=utf8
scriptencoding utf-8
" VUNDLE SETUP
" git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim
set nocompatible " be iMproved, required
filetype off " required
" set the runtime path to include Vundle and initialize
ADGEfficiency / regression.py
Last active December 19, 2019 05:07
Mistakes Data Scientists Make
import numpy as np
import pandas as pd
# build a bimodal dataset with outliers at -20 and +20
data = np.concatenate([
    np.random.normal(5, 1, 10000),
    np.random.normal(-5, 1, 10000),
    np.array([-20, 20] * 1000)
])

# plot a histogram to see the shape of the distribution
pd.DataFrame(data).plot(kind='hist', legend=None, bins=100)
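The gist preview stops at the histogram. As an illustration of why this shape matters (my own addition, reusing the data array and imports above, not the gist's code), the usual summary statistics are misleading here: the mean lands in the empty valley between the two modes, and the standard deviation is inflated by the outliers.

# the mean sits near 0, where almost no observations actually lie
print('mean:', np.mean(data))  # roughly 0
# the std is inflated by the -20/+20 outliers
print('std:', np.std(data))    # roughly 7.7, versus roughly 5.1 without the outliers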
ADGEfficiency / search.py
Created December 19, 2019 05:09
Mistakes Data Scientists Make
import sklearn.ensemble
import sklearn.model_selection

# this search isn't wide enough
useless_search = sklearn.model_selection.GridSearchCV(
    sklearn.ensemble.RandomForestRegressor(n_estimators=10),
    param_grid={'n_estimators': [10, 15, 20]},
)

# this search is more informative - it spans orders of magnitude
useful_search = sklearn.model_selection.GridSearchCV(
    sklearn.ensemble.RandomForestRegressor(n_estimators=10),
    param_grid={'n_estimators': [10, 100, 1000]},
)
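As a hedged usage sketch (the toy data below is hypothetical and not part of the gist), either search object is then fit like any scikit-learn estimator and the winning parameters inspected:

import numpy as np

# hypothetical toy data, only to show the fit-and-inspect pattern
x, y = np.random.uniform(size=(100, 4)), np.random.uniform(size=100)

useful_search.fit(x, y)
print(useful_search.best_params_)  # e.g. {'n_estimators': 100}
print(useful_search.best_score_)   # mean cross-validated score of the best estimator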
ADGEfficiency / pep8.py
Last active December 19, 2019 05:11
Mistakes Data Scientists Make
# bad
Var=1
# good
var = 1
# bad
def adder ( x =10 ,y= 5):
    return x+y
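For symmetry with the variable example, a PEP8-compliant version of the same function might look like the following; this "good" counterpart is my own addition, not part of the gist preview.

# good
def adder(x=10, y=5):
    return x + y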
ADGEfficiency / standardizer.py
Created December 19, 2019 05:12
Mistakes Data Scientists Make
standardized = (data - np.mean(data)) / np.std(data)
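The one-liner above computes its statistics from whatever array it is given. A common form of this mistake (my reading of the theme, not stated in the preview) is to standardize with statistics computed over the full dataset, leaking test-set information. A hedged sketch of the leak-free pattern, assuming a hypothetical train/test split of the data array:

# hypothetical split, only to illustrate the pattern
train, test = data[:8000], data[8000:]

# compute the statistics on the training data only ...
mean, std = np.mean(train), np.std(train)

# ... and reuse them to standardize both splits
train_standardized = (train - mean) / std
test_standardized = (test - mean) / std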
ADGEfficiency / normalizer.py
Created December 19, 2019 05:13
Mistakes Data Scientists Make
normalized = (data - np.min(data)) / (np.max(data) - np.min(data))
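The same concern applies to min-max normalization, with the extra caveat that the min and max are especially sensitive to outliers such as the -20/+20 values built in regression.py above. A hedged sketch reusing training-set statistics, again assuming a hypothetical split of the data array:

# hypothetical split, only to illustrate the pattern
train, test = data[:8000], data[8000:]

# compute min and max on the training data only
low, high = np.min(train), np.max(train)

# reuse them for both splits; test values may fall outside [0, 1]
train_normalized = (train - low) / (high - low)
test_normalized = (test - low) / (high - low)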