@agila5
Created April 20, 2020 10:16
# packages
library(INLA)
#> Loading required package: Matrix
#> Loading required package: sp
#> Loading required package: parallel
#> Loading required package: foreach
#> This is INLA_20.03.09 built 2020-03-09 09:12:35 UTC.
#> See www.r-inla.org/contact-us for how to get help.

# Set up a negative binomial regression with an i.i.d. random error in the
# linear predictor
set.seed(20042020)
n <- 544
ID <- 1:n
x <- rnorm(n)
random_error <- rnorm(n)
eta <- 1 + x + random_error
E <- runif(n, 0, 1)
mu <- E * exp(eta)

size <- 3
y <- rnbinom(n, size = size, mu = mu)
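
# Quick reference (not part of the original gist): the values the model should
# recover from this simulation, to compare against the posterior summaries below.
c(
  intercept    = 1,                     # from eta <- 1 + x + random_error
  beta_x       = 1,
  size         = size,                  # nbinomial size = 3
  precision_ID = 1 / var(random_error)  # iid term simulated with sd = 1, so roughly 1
)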

summary(inla(
  formula = y ~ x + f(ID), 
  family = "nbinomial", 
  data = list(y = y, x = x, ID = ID, E = E),
  control.inla = list(strategy = "laplace"),
  verbose = FALSE, 
  E = E
))
#> 
#> Call:
#>    c("inla(formula = y ~ x + f(ID), family = \"nbinomial\", data = list(y 
#>    = y, ", " x = x, ID = ID, E = E), E = E, verbose = FALSE, control.inla 
#>    = list(strategy = \"laplace\"))" ) 
#> Time used:
#>     Pre = 1.46, Running = 18.8, Post = 0.292, Total = 20.6 
#> Fixed effects:
#>              mean    sd 0.025quant 0.5quant 0.975quant  mode   kld
#> (Intercept) 0.771 0.077      0.636    0.767      0.929 0.760 0.014
#> x           1.101 0.118      0.629    1.114      1.270 1.116 0.126
#> 
#> Random effects:
#>   Name     Model
#>     ID IID model
#> 
#> Model hyperparameters:
#>                                                            mean       sd
#> size for the nbinomial observations (1/overdispersion) 2.43e+04 7.59e+06
#> Precision for ID                                       7.28e-01 1.14e-01
#>                                                        0.025quant 0.5quant
#> size for the nbinomial observations (1/overdispersion)     18.279   247.23
#> Precision for ID                                            0.486     0.74
#>                                                        0.975quant   mode
#> size for the nbinomial observations (1/overdispersion)   6.80e+04 27.945
#> Precision for ID                                         9.08e-01  0.796
#> 
#> Expected number of effective parameters(stdev): 275.40(13.84)
#> Number of equivalent replicates : 1.98 
#> 
#> Marginal log-Likelihood:  -959.79
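
# Sketch, not from the original gist: store the fit in an object (the name
# `fit_544` is mine) instead of passing it straight to summary(), so the
# hyperparameter posteriors can be inspected and, if needed, refined with
# inla.hyperpar(), which recomputes the hyperparameter marginals more accurately.
fit_544 <- inla(
  formula = y ~ x + f(ID),
  family = "nbinomial",
  data = list(y = y, x = x, ID = ID, E = E),
  control.inla = list(strategy = "laplace"),
  E = E
)
fit_544$summary.hyperpar        # posterior summaries for size and Precision for ID
fit_544_hyper <- inla.hyperpar(fit_544)
fit_544_hyper$summary.hyperpar  # hopefully a better-behaved size posterior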

# Repeat everything with n = 500
set.seed(20042020)
n <- 500
ID <- 1:n
x <- rnorm(n)
random_error <- rnorm(n)
eta <- 1 + x + random_error
E <- runif(n, 0, 1)
mu <- E * exp(eta)

size <- 3
y <- rnbinom(n, size = size, mu = mu)

# Now the results are quite good (except for the size parameter). Why is there
# such a big difference between the two runs?
summary(inla(
  formula = y ~ x + f(ID), 
  family = "nbinomial", 
  data = list(y = y, x = x, ID = ID, E = E),
  control.inla = list(strategy = "laplace"),
  verbose = FALSE, 
  E = E
))
#> 
#> Call:
#>    c("inla(formula = y ~ x + f(ID), family = \"nbinomial\", data = list(y 
#>    = y, ", " x = x, ID = ID, E = E), E = E, verbose = FALSE, control.inla 
#>    = list(strategy = \"laplace\"))" ) 
#> Time used:
#>     Pre = 1.51, Running = 27.2, Post = 0.392, Total = 29.1 
#> Fixed effects:
#>              mean    sd 0.025quant 0.5quant 0.975quant  mode kld
#> (Intercept) 1.138 0.140      0.872    1.143      1.394 1.183   0
#> x           1.007 0.076      0.860    1.006      1.156 1.005   0
#> 
#> Random effects:
#>   Name     Model
#>     ID IID model
#> 
#> Model hyperparameters:
#>                                                        mean    sd 0.025quant
#> size for the nbinomial observations (1/overdispersion) 1.33 0.377      0.768
#> Precision for ID                                       1.91 0.668      0.881
#>                                                        0.5quant 0.975quant mode
#> size for the nbinomial observations (1/overdispersion)     1.27       2.24 1.16
#> Precision for ID                                           1.82       3.48 1.64
#> 
#> Expected number of effective parameters(stdev): 131.08(49.68)
#> Number of equivalent replicates : 3.81 
#> 
#> Marginal log-Likelihood:  -909.06
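
# Sketch, not from the original gist: with the n = 500 fit stored in an object
# (`fit_500` is my name), compare the recovered size hyperparameter with the
# simulated truth (size = 3, i.e. overdispersion 1/3). The [[1]] index assumes
# the size parameter is the first entry of marginals.hyperpar, as in the
# summary output above.
fit_500 <- inla(
  formula = y ~ x + f(ID),
  family = "nbinomial",
  data = list(y = y, x = x, ID = ID, E = E),
  control.inla = list(strategy = "laplace"),
  E = E
)
fit_500$summary.hyperpar
# Posterior mean of the overdispersion 1/size, obtained by transforming the
# marginal of the size hyperparameter; the simulated truth is 1/3.
inla.emarginal(function(s) 1 / s, fit_500$marginals.hyperpar[[1]])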

Created on 2020-04-20 by the reprex package (v0.3.0)
