@mschauer, last active October 17, 2022
Adam to maximise an expectation, or for variational inference, with https://github.com/gaurav-arya/StochasticAD.jl
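For orientation, a minimal sketch of the StochasticAD.jl calls used throughout, assuming the exported stochastic_triple / derivative_contribution / derivative_estimate API: derivative_estimate(X, p) returns an unbiased single-sample estimate of d/dp E X(p), even when X is discrete and random.

# Minimal sketch of the StochasticAD.jl API used below (assuming the exported
# stochastic_triple / derivative_contribution / derivative_estimate functions).
using StochasticAD, Distributions

f(p) = rand(Bernoulli(p))        # a discrete, random function of p
st = stochastic_triple(f, 0.3)   # propagate a stochastic triple through f
derivative_contribution(st)      # one unbiased sample of d/dp E f(p)
derivative_estimate(f, 0.3)      # shorthand for the two lines above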
using StochasticAD, Distributions, Optimisers, GLMakie
import Random # hide
Random.seed!(1234) # hide
# Function we want to maximize the expectation of
function X(p)
    a = p * (1 - p)
    b = rand(Binomial(10, p))
    c = 2 * b + 3 * rand(Bernoulli(p))
    return a * c * rand(Normal(b, a))
end
dp = 1/50
N = 1000
ps = dp:dp:1-dp
# Compute naive estimates of expectation
expected = [mean(X(p) for _ in 1:N) for p in ps]
# Estimate derivative of expectation using StochasticAD
slope = [mean(derivative_estimate(X, p) for _ in 1:N) for p in ps]
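As a rough cross-check (illustrative, not part of the gist), a central finite difference of the noisy `expected` curve should very roughly track the StochasticAD `slope` estimates, though both are Monte Carlo estimates and the finite difference amplifies the noise:

# Rough cross-check (illustrative): central finite differences of the naive
# expectation curve, aligned with ps[2:end-1]; expect this to be much noisier
# than the StochasticAD slope estimates.
fd_slope = (expected[3:end] - expected[1:end-2]) ./ (2dp)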
# Run Adam from Optimisers with stochastic gradient based on `derivative_estimate`
m = ([0.5],) # Almost a model
trace = Float64[]
o = Adam()
s = Optimisers.setup(o, m)
for i in 1:1000
    p = destructure(m)[1][]
    # Adam minimizes, so pass the negated stochastic gradient to maximize E X(p);
    # the gradient mirrors the (vector-in-tuple) structure of the model `m`
    Optimisers.update!(s, m, ([-derivative_estimate(X, p)],))
    push!(trace, p)
end
p_opt = destructure(m)[1][]
# Plot with GLMakie
f = Figure()
ax = f[1, 1] = Axis(f, title="Estimates")
lines!(ax, ps, expected, label="≈ E X(p)")
lines!(ax, ps, slope, label="≈ d/dp E X(p)")
vlines!(ax, [p_opt], label="p_opt", color=:green, linewidth=2.0)
hlines!(ax, [0.0], color=:black, linewidth=1.0)
f[1, 2] = Legend(f, ax, framevisible = false)
ylims!(ax, (-50,80))
ax = f[2, 1:2] = Axis(f, title="Adam Trace")
lines!(ax, trace, color=:green, linewidth=2.0)
display(f)
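Since each call to derivative_estimate is unbiased, averaging a handful of independent estimates per step is a simple way to trade compute for lower gradient variance. A variation on the Adam loop above (the batch size of 10 is an arbitrary illustrative choice, not from the gist):

# Variation: average several independent derivative estimates per Adam step
# to reduce gradient noise (batch size 10 is an arbitrary choice).
for i in 1:1000
    p = destructure(m)[1][]
    g = mean(derivative_estimate(X, p) for _ in 1:10)
    Optimisers.update!(s, m, ([-g],))
    push!(trace, p)
end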
using StochasticAD, Distributions, Optimisers, GLMakie
import Random # hide
Random.seed!(1234) # hide
# Variational inference: find a Poisson(p) approximating a Binomial(300, 0.1)
# by minimizing the Kullback–Leibler divergence. X(p) is a single-sample
# estimate of -KL(Poisson(p) ‖ Binomial(300, 0.1)), so maximizing E X(p)
# minimizes the KL divergence.
function X(p)
    i = rand(Poisson(p))
    return -logpdf(Poisson(p), i) + logpdf(Binomial(300, 0.1), i)
end
dp = 1/2
N = 1000
ps = 10:dp:55
expected = [mean(X(p) for _ in 1:N) for p in ps]
slope = [mean(derivative_estimate(X, p) for _ in 1:N) for p in ps]
m = ([10.0],)
trace = Float64[]
o = Adam(0.1)
s = Optimisers.setup(o, m)
for i in 1:1000
    p = destructure(m)[1][]
    # Maximize E X(p), i.e. minimize the KL, by negating the gradient
    Optimisers.update!(s, m, ([-derivative_estimate(X, p)],))
    push!(trace, p)
end
p_opt = destructure(m)[1][]
f = Figure()
ax = f[1, 1] = Axis(f, title="Estimates")
lines!(ax, ps, expected, label="≈ E X(p)")
lines!(ax, ps, slope, label="≈ d/dp E X(p)")
vlines!(ax, [p_opt], label="p_opt", color=:green, linewidth=2.0)
hlines!(ax, [0.0], color=:black, linewidth=1.0)
f[1, 2] = Legend(f, ax, framevisible = false)
ylims!(ax, (-50,80))
ax = f[2, 1:2] = Axis(f, title="Adam Trace")
lines!(ax, trace, color=:green, linewidth=2.0)
save("adamvi.png", f)
display(f)
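As a quick plausibility check (illustrative, not part of the gist): the target Binomial(300, 0.1) has mean 300 · 0.1 = 30, so the fitted Poisson rate p_opt should land near 30.

# Plausibility check (illustrative): the KL-minimizing Poisson rate should
# sit close to the target's mean, since mean(Binomial(300, 0.1)) == 30.
@show p_opt mean(Binomial(300, 0.1))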
@mschauer (Author):
[image: adam]

@mschauer (Author):
Example for variational inference
[image: adamvi]