# Piecewise basis functions: splines
# ch. 5 of ESL: Hastie, Tibshirani & Friedman (2009)
# one dimension (p=1)
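# optional: fix the RNG seed so the simulated data are reproducible
# (the seed value below is an arbitrary choice)
set.seed(2017)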
x <- seq(-0.5,1,by=0.05)[-11] # drop the 11th grid point (x = 0)
y <- 2 - 5*exp(-8 * x^2) + rnorm(length(x), sd=0.5)
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
# kNN regression: piecewise constant
library(FNN)
x.star <- seq(-0.5,1,by=0.025)
kNN.fit <- knn.reg(x, y=y, test=data.frame(x=x.star))
lines(x.star, kNN.fit$pred, col=2)
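# knn.reg() uses k = 3 neighbours by default; a quick sketch (k values chosen
# arbitrarily for illustration) of how the fit changes as k grows
for (k in c(1, 10)) {
  lines(x.star, knn.reg(x, y=y, test=data.frame(x=x.star), k=k)$pred, col=2, lty=3)
}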
# linear basis: degree=1
B <- splines::bs(sort(x), df=5, intercept = TRUE, Boundary.knots=c(-0.5,1), degree=1)
dim(B)
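# with intercept=TRUE, bs() places df - degree - 1 interior knots at quantiles of x,
# so df=5 with degree=1 gives 3 interior knots:
attr(B, "knots")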
plot(c(0,0),c(1,1), type='n', xlab='X', ylab='B(X)',xlim=c(-0.5,1),ylim=c(0,1))
#rug(x)
rug(attr(B,'knots'), col=2)
rug(attr(B,'Boundary.knots'), col=2)
#abline(v=attr(B,"knots"), lty=2, col=4)
#abline(v=attr(B,"Boundary.knots"), lty=3, col=2)
bpred <- predict(B, x.star)
for (i in 1:5) {
  lines(x.star, bpred[,i], col=i, lty=i)
}
title(main="5 linear B-spline basis functions")
sp1 <- lm.fit(B, y)
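# as a cross-check, the same fit via the formula interface; the -1 drops lm()'s own
# intercept, since the basis (intercept=TRUE) already spans it
sp1.lm <- lm(y ~ splines::bs(x, df=5, degree=1, intercept=TRUE,
                             Boundary.knots=c(-0.5,1)) - 1)
all.equal(unname(fitted(sp1.lm)), unname(sp1$fitted.values))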
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
lines(x.star, kNN.fit$pred, col=2)
lines(x, sp1$fitted.values, col=3, lwd=2)
rug(attr(B,'knots'), col=3, lwd=2)
rug(attr(B,'Boundary.knots'), col=3, lwd=2)
legend("bottomright",pch=c(NA,1,NA,NA,NA),lty=c(1,NA,1,1,1),col=c(4,1,2,3),lwd=c(1,1,1,2),
legend=c("true function","observations","kNN regression","linear B-spline (5 knots)"))
# now use cubic basis
B <- splines::bs(sort(x), df=7, intercept = TRUE, Boundary.knots=c(-0.5,1))
plot(c(0,0),c(1,1), type='n', xlab='X', ylab='B(X)',xlim=c(-0.5,1),ylim=c(0,1))
#rug(x)
rug(attr(B,'knots'), col=2)
rug(attr(B,'Boundary.knots'), col=2)
bpred <- predict(B, x.star)
for (i in 1:7) {
  lines(x.star, bpred[,i], col=i, lty=i)
}
title(main="7 cubic B-spline basis functions")
sp2 <- lm.fit(B, y)
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
lines(x, sp1$fitted.values, col=3)
lines(x, sp2$fitted.values, col=6, lwd=2)
rug(attr(B,'knots'), col=6)
rug(attr(B,'Boundary.knots'), col=6)
legend("bottomright",pch=c(NA,1,NA,NA,NA),lty=c(1,NA,1,1,1),col=c(4,1,3,6),lwd=c(1,1,1,2),
legend=c("true function","observations","linear B-spline (5 knots)",
"cubic B-spline (5 knots)"))
# extrapolating outside the range of the data
plot(c(-1,1.5),c(-2,2), type='n', xlab='X', ylab='B(X)',xlim=c(-1,1.5),ylim=c(-2,2))
#rug(x)
rug(attr(B,'knots'), col=2)
rug(attr(B,'Boundary.knots'), col=2)
newx <- seq(-1,1.5,by=0.025)
bpred <- predict(B, newx)
for (i in 1:7) {
  lines(newx, bpred[,i], col=i, lty=i)
}
range(bpred)
# natural cubic splines
B <- splines::ns(sort(x), df=7, intercept = TRUE, Boundary.knots=c(-0.5,1))
plot(c(-1,1.5),c(-2,2), type='n', xlab='X', ylab='B(X)',xlim=c(-1,1.5),ylim=c(-2,2))
#rug(x)
rug(attr(B,'knots'), col=2)
rug(attr(B,'Boundary.knots'), col=2)
bpred <- predict(B, newx)
for (i in 1:7) {
  lines(newx, bpred[,i], col=i, lty=i)
}
range(bpred)
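# natural splines are constrained to be linear beyond the boundary knots, so the
# second differences of each basis column should be (numerically) zero outside [-0.5, 1]
outside <- newx < -0.5
max(abs(apply(bpred[outside, ], 2, diff, differences=2)))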
sp3 <- lm.fit(B, y)
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
lines(x, sp1$fitted.values, col=3)
lines(x, sp2$fitted.values, col=6)
lines(x, sp3$fitted.values, col=1)
legend("bottomright",pch=c(NA,1,NA,NA,NA,NA),lty=c(1,NA,1,1,1,1),col=c(4,1,3,6,1),
legend=c("true function","observations","linear B-spline (5 knots)",
"cubic B-spline (5 knots)","natural cublic spline"))
# increase the number of knots: overfitting!
B <- splines::ns(sort(x), df=20, intercept = TRUE, Boundary.knots=c(-0.5,1))
image(B)
sp4 <- lm.fit(B, y)
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
lines(x, sp3$fitted.values, col=1)
lines(x, sp4$fitted.values, col=2)
rug(attr(B,'knots'), col=2)
rug(attr(B,'Boundary.knots'), col=2)
legend("bottomright",pch=c(NA,1,NA,NA,NA),lty=c(1,NA,1,1,1),col=c(4,1,1,2),
legend=c("true function","observations","B-spline (5 knots)","B-spline (20 knots)"))
# Sect. 5.4: Penalised Splines
sp4$coefficients
prior.betaSD <- 3
NB <- 20
prior.betaCov <- diag(prior.betaSD^2, NB, NB)
n.iter <- 20
pen <- 0.1 # fixed smoothing penalty, lambda
# prior precision matrix for the spline coefficients, beta
library(Matrix)
B <- splines::bs(sort(x), df=20, intercept = TRUE, Boundary.knots=c(-0.5,1))
# matrix of 2nd derivatives (Eilers & Marx, 1996)
# WARNING: assumes equidistant knots!
D <- diag(NB)
D <- diff(diff(D))
g0 <- Matrix(crossprod(D), sparse=TRUE)
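# D is the (NB-2) x NB second-order difference operator, so g0 = D'D is the usual
# 20 x 20 P-spline roughness penalty; quick dimension checks
dim(D)
dim(g0)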
bTb <- Matrix(crossprod(B), sparse=TRUE)
giPrecMx <- bTb + length(y)*pen*g0
giChol <- Cholesky(giPrecMx)
beta.mu <- as.vector(solve(giChol, crossprod(B, y)))
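# a quick cross-check: the sparse Cholesky solve should agree with a dense solve
# up to rounding error
max(abs(beta.mu - as.vector(solve(as.matrix(giPrecMx), crossprod(B, y)))))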
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
bpred <- predict(B, x.star)
lines(x.star, bpred %*% beta.mu, col=2)
#lines(x.star, bpred %*% sp4$coefficients, col=6)
# inverse gamma prior for the noise
priorNoiseNu <- 4
priorNoiseSD <- 1
priorNoiseSS <- priorNoiseNu * priorNoiseSD^2
sqDiff <- sum(diag(crossprod(y))) - sum(diag(t(beta.mu) %*% giPrecMx %*% beta.mu))
# marginal posterior draws of the noise precision (tau) and noise SD
newSS <- (priorNoiseSS + sqDiff)/2.0
sdVec <- rgamma(n.iter, (priorNoiseNu + length(y))/2.0)
newTau <- sdVec / newSS
sigma_prop <- 1/sqrt(newTau)
# posterior samples of the spline function
beta.samp <- matrix(nrow=n.iter, ncol=NB)
for (it in 1:n.iter) {
  stdNorm <- rnorm(NB)
  # draw from N(beta.mu, (tau*Q)^-1): with upper-triangular R such that R'R = tau*Q,
  # R^-1 %*% stdNorm has the required covariance
  blChol <- chol(giPrecMx * newTau[it])
  beta.samp[it,] <- as.vector(beta.mu + solve(blChol, stdNorm))
  lines(x.star, bpred %*% beta.samp[it,], col=2, lty=3)
}
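# a rough pointwise 95% band from these draws (only n.iter = 20 draws here, so this
# is indicative rather than a well-estimated credible band)
f.samp <- bpred %*% t(beta.samp)
lines(x.star, apply(f.samp, 1, quantile, probs=0.025), col=2, lwd=2)
lines(x.star, apply(f.samp, 1, quantile, probs=0.975), col=2, lwd=2)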
# integrated squared second derivative (Green & Silverman, 1994)
B <- splines::ns(sort(x), df=20, intercept = TRUE, Boundary.knots=c(-0.5,1))
bTb <- Matrix(crossprod(B), sparse=TRUE) # recompute B'B for the new (natural spline) basis
tk <- c(attr(B,'Boundary.knots')[1], attr(B,'knots'), attr(B,'Boundary.knots')[2]) # all knots, in order
NK <- length(tk)
rug(tk, col=2)
h <- diff(tk) # knot spacings
Q <- matrix(0, nrow=NK, ncol=NK-2)
for (j in 2:(NK-1)) {
  Q[(j-1),(j-1)] <- 1/h[j-1]
  Q[j,(j-1)] <- -1/h[j-1] - 1/h[j]
  Q[(j+1),(j-1)] <- 1/h[j]
}
R <- matrix(0, nrow=NK-2, ncol=NK-2)
for (i in 2:(NK-2)) {
  R[(i-1),(i-1)] <- (h[i-1] + h[i])/3
  R[(i-1),i] <- R[i,(i-1)] <- h[i]/6
}
R[(NK-2),(NK-2)] <- (h[NK-2] + h[NK-1])/3 # last diagonal entry
K <- Q %*% solve(R) %*% t(Q)
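# K = Q R^-1 Q' should be symmetric positive semi-definite with a two-dimensional null
# space (constant and linear functions have zero roughness); quick checks
max(abs(K - t(K)))
tail(eigen(K, symmetric=TRUE)$values, 2) # two smallest eigenvalues, numerically zero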
# Gibbs sampling for beta and lambda (Ruppert, Wand & Carroll, 2003)
betaGibbs <- function(y, var_noise, giPrecMx, B) {
  # Gaussian full conditional for the spline coefficients:
  # mean solves giPrecMx %*% mu = B'y; covariance is var_noise * giPrecMx^-1
  stdNorm <- rnorm(ncol(B))
  giChol <- Cholesky(giPrecMx)
  beta.mu <- as.vector(solve(giChol, crossprod(B, y)))
  tau <- 1/var_noise
  blChol <- chol(giPrecMx * tau) # upper triangular: R'R = tau * giPrecMx
  return(as.vector(beta.mu + solve(blChol, stdNorm)))
}
varNoiseGibbs <- function(A, n, B, ssd) {
  # inverse-gamma full conditional for a variance: prior IG(shape=A, scale=1/B)
  # updated to IG(A + n/2, 1/B + ssd/2)
  Aprime <- A + n/2
  Bprime <- 1/B + ssd/2
  return(1/rgamma(1, Aprime, Bprime))
}
A_e <- B_e <- A_b <- B_b <- 0.1 # inverse gamma priors for variance
n_iter <- 2000
samp_SdB <- samp_SdE <- samp_L <- numeric(length=n_iter)
samp_Beta <- matrix(nrow=n_iter, ncol=NK)
var_noise <- 1/rgamma(1,A_e,1/B_e)
var_beta <- 1/rgamma(1,A_b,1/B_b)
for (it in 1:n_iter) {
  lambda <- var_noise/var_beta
  # giPrecMx <- Matrix(diag(NK) + lambda*K, sparse=TRUE)
  giPrecMx <- bTb + length(y)*lambda*g0
  samp_Beta[it,] <- beta <- betaGibbs(y, var_noise, giPrecMx, B)
  ssd_beta <- crossprod(y - B %*% beta)
  var_noise <- varNoiseGibbs(A_e, length(y), B_e, ssd_beta)
  samp_SdE[it] <- sqrt(var_noise)
  var_beta <- varNoiseGibbs(A_b, NK, B_b, crossprod(beta))
  samp_SdB[it] <- sqrt(var_beta)
  samp_L[it] <- var_noise/var_beta
}
library(coda)
samp_coda <- mcmc(cbind(samp_SdE,samp_SdB,samp_L))
varnames(samp_coda) <- c("sigma[epsilon]","sigma[beta]","lambda")
plot(samp_coda)
nburn <- n_iter/2
samp_coda <- mcmc(cbind(samp_SdE,samp_SdB,samp_L,samp_Beta))
varnames(samp_coda) <- c("sigma[epsilon]","sigma[beta]","lambda", paste0("beta[",1:NK,"]"))
samp <- window(samp_coda, start=nburn+1)
effectiveSize(samp)
summary(samp)
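# posterior mean of the smoothing parameter lambda = sigma_eps^2 / sigma_beta^2,
# for comparison with the fixed penalty pen = 0.1 used earlier
mean(samp_L[(nburn+1):n_iter])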
samp_idx <- nburn + sample(1:(n_iter-nburn), 20)
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
bpred <- predict(B, x.star)
for (idx in samp_idx) {
  lines(x.star, bpred %*% samp_Beta[idx,], col=2, lty=3)
}
legend("bottomright",legend=c("observations","true function","posterior samples"),
lty=c(NA,1,3),col=c(1,4,2),pch=c(1,NA,NA), lwd=c(1,1,2))
title(main=paste(length(samp_idx), "posterior samples for Bayesian P-spline"))
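# overlay the posterior mean curve (average of the retained draws), for reference
beta.postmean <- colMeans(samp_Beta[(nburn+1):n_iter, ])
lines(x.star, bpred %*% beta.postmean, col=1, lwd=2)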
pen <- 1e-2
giPrecMx <- Matrix(diag(NK) + pen*K, sparse=TRUE)
giChol <- Cholesky(giPrecMx)
beta.mu <- as.vector(solve(giChol, crossprod(B, y)))
plot(x,y, ylim=range(-3,2,y))
curve(2 - 5*exp(-8 * x^2), from=-0.5, to=1, n=length(x), col=4, add=T)
bpred <- predict(B, x.star)
lines(x.star, bpred %*% beta.mu, col=2)
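# for comparison, base R's stats::smooth.spline also fits a penalised natural cubic
# smoothing spline, choosing the smoothing parameter by generalised cross-validation
ss <- smooth.spline(x, y)
lines(predict(ss, x.star), col=3, lty=2)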