khannay / make_nba_trim.R
Created January 10, 2020 23:17
R code to make a trimmed-down NBA draft data set and add a lottery column
# Reproducible 100-row sample of the NBA draft data
set.seed(123)
nba_trim <- dplyr::sample_n(NBA_Draft_Data, size = 100)
# Lottery indicator column: 1 for lottery picks (picks 1-14), 0 otherwise
nba_trim$Lottery <- ifelse(nba_trim$Pick.Number <= 14, 1, 0)
khannay / curvefitting_poorbasis.jl
Created April 16, 2020 14:35
Curve fitting using an ANN for sin(x)
using Flux, Plots
using Base.Iterators: repeated
using Flux: @epochs
using LinearAlgebra

# Training grid: 150 evenly spaced points on [0, 20]
gridsize = 150
tstart = 0.0
tend = 20.0

# Target function to fit
myfunc(x) = sin(x)
x = collect(range(tstart, stop=tend, length=gridsize))
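The preview stops at the grid definition. A minimal sketch of how the fit could continue in Flux; the layer sizes, learning rate, and epoch count here are assumptions, not the gist's actual values:

y = myfunc.(x)
X = reshape(Float32.(x), 1, :)   # Flux layers expect features × samples
Y = reshape(Float32.(y), 1, :)

m = Chain(Dense(1, 32, tanh), Dense(32, 1))   # small assumed network
loss(a, b) = Flux.mse(m(a), b)
dataset = repeated((X, Y), 1)                 # one full batch per epoch
opt = ADAM(0.01)
@epochs 500 Flux.train!(loss, Flux.params(m), dataset, opt)

plot(x, y, label="sin(x)")
plot!(x, vec(m(X)), label="ANN fit")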
khannay / Lotka_Voltera_Param_Fit.jl
Created April 17, 2020 14:35
Param Fit in Julia
using DifferentialEquations, Flux, Optim, DiffEqFlux, Plots

# Lotka-Volterra predator-prey equations, in-place form
function lotka_volterra(du, u, p, t)
    x, y = u          # prey, predator populations
    α, β, δ, γ = p    # model parameters
    du[1] = α*x - β*x*y
    du[2] = -δ*y + γ*x*y
end
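As a quick sanity check of the definition, the system can be solved and plotted directly; the initial condition and parameter values below are the ones used in the following gists:

u0 = [1.0, 1.0]
p = [1.5, 1.0, 3.0, 1.0]
prob = ODEProblem(lotka_volterra, u0, (0.0, 10.0), p)
sol = solve(prob, Tsit5())
plot(sol, label=["prey" "predator"])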
khannay / lv.jl
Created April 23, 2020 13:48
Lotka-Volterra Layer Example
using DifferentialEquations, Flux, Optim, DiffEqFlux, Plots
# Initial condition, time span, sampling grid, and true parameters
u0 = [1.0, 1.0]
tstart = 0.0
tend = 10.0
sampling = 0.1
model_params = [1.5, 1.0, 3.0, 1.0]
# Lotka-Volterra right-hand side, same as in the previous gist
function model(du, u, p, t)
    x, y = u
    α, β, δ, γ = p
    du[1] = α*x - β*x*y
    du[2] = -δ*y + γ*x*y
end
khannay / lv_adjoint.jl
Created April 23, 2020 13:52
LV adjoint function
# param[1:2] is the initial condition, param[3:end] the model parameters;
# the ODE solve itself acts as the single "layer" being trained
function predict_adjoint(param)
    prob = ODEProblem(model, [1.0, 0.0], (tstart, tend), model_params)
    Array(concrete_solve(prob, Tsit5(), param[1:2], param[3:end],
                         saveat=tstart:sampling:tend, abstol=1e-8, reltol=1e-6))
end

# Generate some data to fit, and add some noise to it
data = predict_adjoint([1.0, 1.0, 1.5, 1.0, 3.0, 1.0])
σN = 0.1
data += σN*randn(size(data))
data = abs.(data)  # keep measurements positive

# Returning more than just the loss value breaks the Flux optimizer
function loss_adjoint(param)
    prediction = predict_adjoint(param)
    sum(abs2, prediction - data)
end
khannay / fitting_lv.jl
Last active April 23, 2020 13:57
Lotka-Volterra fitting
# Two-stage fit: ADAM to get close, then BFGS to polish
function train_model(; pguess=[0.8, 1.2, 1.2, 1.0, 2.9, 1.1])
    println("The initial loss function is $(loss_adjoint(pguess))")
    # Train the ODE
    resinit = DiffEqFlux.sciml_train(loss_adjoint, pguess, ADAM(), maxiters=3000)
    res = DiffEqFlux.sciml_train(loss_adjoint, resinit.minimizer,
                                 BFGS(initial_stepnorm=1e-5))
    println("The parameters are $(res.minimizer) with final loss value $(res.minimum)")
    return res
end
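A hypothetical usage example, comparing the recovered trajectory to the noisy measurements; the plotting details are an assumption:

res = train_model()
fit = predict_adjoint(res.minimizer)
ts = tstart:sampling:tend
scatter(ts, data', label=["x data" "y data"], markersize=2)
plot!(ts, fit', label=["x fit" "y fit"])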
khannay / lorenz_def.jl
Last active April 23, 2020 19:22
lorenz_system_def_pf
using DifferentialEquations, Flux, Optim, DiffEqFlux, Plots

# True parameters (σ, ρ, β) and several candidate initial conditions
model_params = [10.0, 28.0, 8.0/3]
all_ic = [[1.0, 0.0, 0.0], [0.5, 1.0, 0.0], [3.0, 0.2, 0.1]]
tstart = 0.0
tend = 10.0
sampling = 0.05

# Lorenz system, in-place form
function model(du, u, p, t)
    x, y, z = u
    σ, ρ, β = p
    du[1] = σ*(y - x)
    du[2] = x*(ρ - z) - y
    du[3] = x*y - β*z
end

# param[1:3] is the initial condition, param[4:end] the model parameters
function predict_adjoint(param)
    prob = ODEProblem(model, [1.0, 0.0, 0.0], (tstart, tend), model_params)
    Array(concrete_solve(prob, Tsit5(), param[1:3], param[4:end],
                         saveat=tstart:sampling:tend, abstol=1e-8, reltol=1e-6))
end
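all_ic is defined but never used in the snippet shown; one plausible reading, purely an assumption here, is that a trajectory is generated for each candidate initial condition:

sols = [solve(ODEProblem(model, ic, (tstart, tend), model_params), Tsit5(),
              saveat=tstart:sampling:tend) for ic in all_ic]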
khannay / data_lorenz.jl
Created April 23, 2020 14:46
data_lorenz
# Generate some data to fit, and add some noise to it
data = predict_adjoint([1.0, 0.0, 0.0, 10.0, 28.0, 8.0/3])
σN = 0.05
data += σN*randn(size(data))

# Sum-of-squares loss against the noisy data
function loss_adjoint(param)
    prediction = predict_adjoint(param)
    sum(abs2, prediction - data)
end
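The series stops at the loss definition; fitting the Lorenz initial condition and parameters would mirror train_model above. A sketch, where the starting guess pguess is an assumption:

pguess = [0.9, 0.1, 0.1, 9.0, 30.0, 2.5]   # assumed starting point
resinit = DiffEqFlux.sciml_train(loss_adjoint, pguess, ADAM(0.05), maxiters=2000)
res = DiffEqFlux.sciml_train(loss_adjoint, resinit.minimizer, BFGS(initial_stepnorm=1e-5))
println("Recovered parameters: $(res.minimizer)")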