Evil Rosenbrock density with NUTS
Gist by @mschauer, last active November 18, 2023
# hybridrosenbrock.jl: Hybrid Rosenbrock target with location μ, scale a, and an
# n2 × n1 matrix b of coupling coefficients; unnormalised density
#   exp(-a*(x[1]-μ)^2 - Σ_j Σ_i b[j,i]*(X[j,i] - X[j,i-1]^2)^2), with X[j,0] := x[1].
using Distributions
using Random

struct HybridRosenbrock{Tμ,Ta,Tb}
    μ::Tμ
    a::Ta
    b::Tb
end
function Distributions.logpdf(D::HybridRosenbrock, x)
    n2, n1 = size(D.b)
    n = length(x)
    # reshape to the same (n2, n1) layout as used in `rand` below
    X = reshape(@view(x[2:end]), n2, n1)
    y = -D.a*(x[1] - D.μ)^2
    for j in 1:n2
        y -= D.b[j,1]*(X[j,1] - x[1]^2)^2
        for i in 2:n1
            y -= D.b[j,i]*(X[j,i] - X[j,i-1]^2)^2
        end
    end
    # log normalising constant: one Gaussian factor per quadratic term (n in total)
    C = 0.5log(D.a) + 0.5sum(log.(D.b)) - n/2*log(π)
    y + C
end
function Distributions.rand(rng::Random.AbstractRNG, D::HybridRosenbrock)
    # exact draw by forward simulation of the conditional Gaussians
    x = zeros(length(D.b) + 1)
    n2, n1 = size(D.b)
    X = reshape(@view(x[2:end]), n2, n1)
    x[1] = D.μ + randn(rng)/sqrt(2*D.a)
    for j in 1:n2
        X[j,1] = x[1]^2 + randn(rng)/sqrt(2D.b[j,1])
        for i in 2:n1
            X[j,i] = X[j,i-1]^2 + randn(rng)/sqrt(2D.b[j,i])
        end
    end
    x
end
# Driver script
using Random
using Distributions

d = 5
Random.seed!(3)
include("hybridrosenbrock.jl")

# Coordinate-wise scale factors used to precondition the badly scaled target;
# NUTS runs in units of σ0, and draws are multiplied by σ0 to map back.
const σ0 = [10.136599898636616,
            144.60096611643337,
            144.60377635508613,
            81881.51706273475,
            81889.36757394506]

D = HybridRosenbrock(1, 1/20, 100/20*ones(2,2))   # d = 1 + 2*2 = 5 coordinates
x_direct = [rand(D) for k in 1:1000]              # exact draws for comparison
# Log target in the rescaled coordinates x ./ σ0; the constant Jacobian term
# sum(log.(σ0)) is omitted since it does not affect MCMC.
ℓ(x) = logpdf(HybridRosenbrock(1, 1/20, 100/20*ones(2,2)), x .* σ0)
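# Optional sanity check (a minimal sketch): the first coordinate is exactly
# Normal(μ, 1/(2a)), so the direct draws should reproduce its standard
# deviation sqrt(1/(2a)) ≈ 3.16 here.
using Statistics
@show std(getindex.(x_direct, 1)) sqrt(1/(2*D.a))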
# Set the number of samples to draw and warmup iterations
n_samples, n_adapts = 100_000, 8_000
samples_long = [rand(D) for i in 1:n_samples]   # long run of exact draws (not used below)
using AdvancedHMC, Distributions, ForwardDiff
# Initial parameter value (the dimensionality d was set above)
initial_θ = zeros(d)
# Define the target distribution
ℓπ(θ) = ℓ(θ)
# Define a Hamiltonian system
metric = DiagEuclideanMetric(d)
#metric = DenseEuclideanMetric(d)
hamiltonian = Hamiltonian(metric, ℓπ, ForwardDiff)
# Define a leapfrog solver, with initial step size chosen heuristically
initial_ϵ = find_good_stepsize(hamiltonian, initial_θ)
#initial_ϵ = 1e-2
integrator = Leapfrog(initial_ϵ)
# Define an HMC sampler with the following components:
# - multinomial sampling scheme,
# - generalised No-U-Turn criterion, and
# - windowed adaptation for step size and diagonal mass matrix
proposal = AdvancedHMC.NUTS{MultinomialTS, GeneralisedNoUTurn}(integrator)
adaptor = StanHMCAdaptor(MassMatrixAdaptor(metric), StepSizeAdaptor(0.8, integrator))
#adaptor = MassMatrixAdaptor(metric)
# Run the sampler to draw samples from the target, where
# - `samples` will store the samples
# - `stats` will store diagnostic statistics for each sample
samples, stats = @time sample(hamiltonian, proposal, initial_θ, n_samples, adaptor, n_adapts; progress=true)
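# Quick numerical check (sketch): compare marginal means and standard deviations
# of the NUTS draws (mapped back to the original scale by σ0) with the exact
# draws in x_direct; they should roughly agree if the chain has mixed.
using Statistics
for i in 1:d
    m_nuts, s_nuts = mean(σ0[i]*getindex.(samples, i)), std(σ0[i]*getindex.(samples, i))
    m_dir, s_dir = mean(getindex.(x_direct, i)), std(getindex.(x_direct, i))
    println("coord $i: mean $(round(m_nuts, sigdigits=3)) vs $(round(m_dir, sigdigits=3)), ",
            "std $(round(s_nuts, sigdigits=3)) vs $(round(s_dir, sigdigits=3))")
end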
using GLMakie   # `using` must be at top level, not inside the `let` block
let x = samples
    r = 1:20:n_samples            # thin the chain for plotting
    t = 1:n_samples
    fig4 = Figure(resolution=(2000,2000))
    e = min(d, 5)
    for i in 1:min(d^2, e^2)
        u = CartesianIndices((e,e))[i]
        if u[1] == u[2]
            # diagonal: trace plot of coordinate u[1], rescaled back by σ0
            GLMakie.scatter(fig4[u[1],u[2]], t[r], σ0[u[1]]*getindex.(x, u[1])[r], markersize=0.5)
        elseif u[1] < u[2]
            # off-diagonal: pairwise scatter, exact draws (orange) vs NUTS draws
            GLMakie.scatter(fig4[u[1],u[2]], getindex.(x_direct, u[1]), getindex.(x_direct, u[2]), markersize=3.5, strokewidth=0, color=(:orange,0.1))
            GLMakie.scatter!(fig4[u[1],u[2]], σ0[u[1]]*getindex.(x, u[1])[r], σ0[u[2]]*getindex.(x, u[2])[r], markersize=0.5)
        end
    end
    save("nuts.png", fig4)
end
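# Rough effective-sample-size estimate per coordinate (sketch): truncate the
# autocorrelation sum at the first non-positive lag; for a more careful analysis
# use a dedicated diagnostics package.
using Statistics
function crude_ess(v::AbstractVector; maxlag=min(length(v) - 1, 10_000))
    n, m, c0 = length(v), mean(v), var(v)
    τ = 1.0
    for k in 1:maxlag
        ρ = sum((v[1+k:end] .- m) .* (v[1:n-k] .- m)) / ((n - k)*c0)
        ρ <= 0 && break        # stop at the first non-positive autocorrelation
        τ += 2ρ
    end
    return n/τ
end
for i in 1:d
    println("coord $i: ESS ≈ ", round(crude_ess(getindex.(samples, i)), digits=1))
end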