CFA with Stan
library(lavaan)
library(rstan)
library(rethinking)
library(loo)

options(mc.cores = parallel::detectCores())
rstan_options(auto_write = TRUE)
dat <- dplyr::select(HolzingerSwineford1939, x1:x9)

# Maximum-likelihood fits with lavaan for reference
summary(cfa.mod <- sem(
  "visual =~ x1 + x2 + x3 + x9\n verbal =~ x4 + x5 + x6\n speed =~ x7 + x8 + x9",
  dat, meanstructure = TRUE, std.lv = TRUE))
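
# The two residual covariances freed in the next model can be motivated by
# modification indices from this first fit. A minimal sketch (not in the
# original gist), using lavaan's modindices():
head(modindices(cfa.mod, sort. = TRUE), 5)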
summary(cfa.mod.pe <- sem(
  "visual =~ x1 + x2 + x3 + x9\n verbal =~ x4 + x5 + x6\n speed =~ x7 + x8 + x9\n x3 ~~ x5\n x2 ~~ x7",
  dat, meanstructure = TRUE, std.lv = TRUE))

# Model 1: diagonal residual covariance matrix
cfa.decomp.stan <- stan_model(stanc_ret = stanc(file = "cfa_mat_decomp.stan"))
# Model 2: non-diagonal residual covariance matrix via the parameter-expansion
# ("srs") approach in Merkle & Rosseel (2018)
cfa.decomp.stan.pe <- stan_model(stanc_ret = stanc(file = "cfa_mat_decomp_pe.stan"))

data.list <- list(
  g_alpha = 1, g_beta = 1, Np = nrow(dat), Ni = ncol(dat), Nf = 3,
  input_data = as.matrix(dat), loading_pattern = matrix(c(
    rep(1, 3), rep(0, 5), 1, rep(0, 3), rep(1, 3), rep(0, 9), rep(1, 3)), 9),
  # each row of error_mat permits an error correlation between one item pair:
  # here {x3, x5} and {x2, x7}
  error_mat = matrix(c(3, 5, 2, 7), nrow = 2, byrow = TRUE),
  shape_phi = 1, shape_delta = 1, beta_par = 1) # shape_delta is not used by either .stan file
data.list$Nl <- sum(data.list$loading_pattern == 1) # number of free loadings
data.list$loading_pattern
data.list$v <- nrow(data.list$error_mat) # number of correlated error pairs
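
# Sanity check (a sketch, not in the original gist): the pattern should
# mirror the lavaan syntax above, i.e. x1-x3 and x9 load on visual,
# x4-x6 on verbal, and x7-x9 on speed; dimnames are for display only.
lp <- data.list$loading_pattern
dimnames(lp) <- list(paste0("x", 1:9), c("visual", "verbal", "speed"))
lp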

system.time(cfa.decomp.res <- sampling(
  cfa.decomp.stan, data = data.list, seed = 12345))
print(cfa.decomp.res, pars = c(
  "loadings", "item_res_vars", "phi_mat[1,2]", "phi_mat[1,3]",
  "phi_mat[2,3]", "intercepts"), probs = c(.025, .975), digits_summary = 3)

system.time(cfa.decomp.res.pe <- sampling(
  cfa.decomp.stan.pe, data = data.list, seed = 12345))
print(cfa.decomp.res.pe, pars = c(
  "loadings", "res_cov", "item_res_vars", "phi_mat[1,2]", "phi_mat[1,3]",
  "phi_mat[2,3]", "intercepts"), probs = c(.025, .975), digits_summary = 3)

# Model comparison via PSIS-LOO
cfa.ll <- extract_log_lik(cfa.decomp.res)
cfa.ll.pe <- extract_log_lik(cfa.decomp.res.pe)
# rstan defaults give 4 chains x 1000 post-warmup draws, stacked chain by
# chain, so chain_id needs rep(1:4, each = 1000), not rep(1:4, 1000)
(cfa.loo <- loo(cfa.ll, r_eff = relative_eff(exp(cfa.ll), chain_id = rep(1:4, each = 1000))))
(cfa.loo.pe <- loo(cfa.ll.pe, r_eff = relative_eff(exp(cfa.ll.pe), chain_id = rep(1:4, each = 1000))))
# loo_compare() avoids the clash between loo::compare() and rethinking::compare()
loo_compare(cfa.loo, cfa.loo.pe)
loo_model_weights(list(cfa.loo, cfa.loo.pe)) # stacking weights (default)
loo_model_weights(list(cfa.loo, cfa.loo.pe), method = "pseudobma")
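
# To see how the expanded model's residual covariances line up with the
# lavaan ML estimates, pull both sets of estimates. A sketch (not in the
# original gist); res_cov and cor_errs are defined in cfa_mat_decomp_pe.stan:
summary(cfa.decomp.res.pe, pars = c("res_cov", "cor_errs"))$summary
subset(parameterEstimates(cfa.mod.pe),
       op == "~~" & lhs %in% c("x3", "x2") & rhs %in% c("x5", "x7"))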
cfa_mat_decomp.stan
data {
  real g_alpha; // inv_gamma prior shape for residual variances
  real g_beta; // inv_gamma prior scale for residual variances
  int<lower = 1> shape_phi; // LKJ prior shape for phi
  int<lower = 0> Np; // number of persons
  int<lower = 0> Ni; // number of items
  int<lower = 0> Nf; // number of factors
  int<lower = 0> Nl; // number of non-zero loadings
  matrix[Np, Ni] input_data; // person-by-item data matrix
  matrix[Ni, Nf] loading_pattern; // 0/1 pattern of free loadings
}
transformed data {
  vector[Ni] dat_mv[Np]; // rows of input_data as vectors for multi_normal
  for (i in 1:Np) {
    dat_mv[i] = input_data[i, ]';
  }
}
parameters {
  vector<lower = 0>[Nl] loadings; // free loadings
  real<lower = 0> sigma_loadings; // sd of loadings, hyperparameter
  vector<lower = 0>[Ni] item_res_vars; // heteroskedastic item residual variances
  cholesky_factor_corr[Nf] phi_mat_chol; // Cholesky factor of factor correlations
  vector[Ni] intercepts;
}
transformed parameters {
  matrix[Ni, Nf] loading_matrix;
  corr_matrix[Nf] phi_mat;
  matrix[Ni, Ni] lamb_phi_lamb;
  cov_matrix[Ni] delta_mat;
  cov_matrix[Ni] Sigma;
  {
    // fill the free loadings into the loading matrix in pattern order
    int pos = 0;
    for (i in 1:Ni) {
      for (j in 1:Nf) {
        if (loading_pattern[i, j] != 0) {
          pos = pos + 1;
          loading_matrix[i, j] = loadings[pos];
        } else loading_matrix[i, j] = 0;
      }
    }
  }
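  // model-implied covariance matrix: Sigma = Lambda * Phi * Lambda' + Delta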
  phi_mat = multiply_lower_tri_self_transpose(phi_mat_chol);
  lamb_phi_lamb = quad_form_sym(phi_mat, loading_matrix');
  delta_mat = diag_matrix(item_res_vars);
  Sigma = lamb_phi_lamb + delta_mat;
}
model {
  loadings ~ lognormal(0, sigma_loadings);
  sigma_loadings ~ cauchy(0, 2.5);
  item_res_vars ~ inv_gamma(g_alpha, g_beta);
  phi_mat_chol ~ lkj_corr_cholesky(shape_phi);
  intercepts ~ normal(0, 10);
  dat_mv ~ multi_normal(intercepts, Sigma);
}
generated quantities {
  // pointwise log-likelihood for loo
  vector[Np] log_lik;
  for (i in 1:Np) {
    log_lik[i] = multi_normal_lpdf(dat_mv[i] | intercepts, Sigma);
  }
}
cfa_mat_decomp_pe.stan
functions {
  // sign of x as +/-1 (x = 0 maps to -1; only applied to correlations)
  int sign(real x) {
    if (x > 0)
      return 1;
    else
      return -1;
  }
}
data {
  real g_alpha; // inv_gamma prior shape for residual variances
  real g_beta; // inv_gamma prior scale for residual variances
  real beta_par; // beta prior shape for error correlations
  int<lower = 1> shape_phi; // LKJ prior shape for phi
  int<lower = 0> Np; // number of persons
  int<lower = 0> Ni; // number of items
  int<lower = 0> Nf; // number of factors
  int<lower = 0> Nl; // number of non-zero loadings
  int<lower = 1> v; // number of correlated errors
  int error_mat[v, 2]; // correlated-error item pairs
  matrix[Np, Ni] input_data; // person-by-item data matrix
  matrix[Ni, Nf] loading_pattern; // 0/1 pattern of free loadings
}
transformed data {
  vector[Ni] dat_mv[Np]; // rows of input_data as vectors for multi_normal
  for (i in 1:Np) {
    dat_mv[i] = input_data[i, ]';
  }
}
parameters {
  vector<lower = 0>[Nl] loadings; // free loadings
  real<lower = 0> sigma_loadings; // sd of loadings, hyperparameter
  vector<lower = 0>[Ni] item_res_vars; // heteroskedastic item residual variances
  cholesky_factor_corr[Nf] phi_mat_chol; // Cholesky factor of factor correlations
  vector[Ni] intercepts;
  vector<lower = 0, upper = 1>[v] cor_errs_01; // correlated errors on (0, 1)
}
transformed parameters {
  matrix[Ni, Nf] loading_matrix;
  corr_matrix[Nf] phi_mat;
  matrix[Ni, Ni] lamb_phi_lamb;
  matrix[Ni, Ni] delta_mat_ast;
  cov_matrix[Ni] Sigma;
  matrix[Ni, v] loading_par_exp;
  vector<lower = -1, upper = 1>[v] cor_errs; // correlated errors on (-1, 1)
  vector[v] res_cov; // residual covariances
  {
    // fill the free loadings into the loading matrix in pattern order
    int pos = 0;
    for (i in 1:Ni) {
      for (j in 1:Nf) {
        if (loading_pattern[i, j] != 0) {
          pos = pos + 1;
          loading_matrix[i, j] = loadings[pos];
        } else loading_matrix[i, j] = 0;
      }
    }
  }
  phi_mat = multiply_lower_tri_self_transpose(phi_mat_chol);
  lamb_phi_lamb = quad_form_sym(phi_mat, loading_matrix');
  cor_errs = cor_errs_01 * 2 - 1; // rescale from (0, 1) to (-1, 1)
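  // Parameter expansion for the correlated errors (Merkle & Rosseel, 2018):
  // for each item pair (e1, e2) with correlation r and residual variances
  // v1, v2, build a column t with t[e1] = sqrt(|r| * v1) and
  // t[e2] = sign(r) * sqrt(|r| * v2). Then (t * t')[e1, e2] =
  // r * sqrt(v1 * v2) = res_cov, and the diagonal inflation |r| * v1,
  // |r| * v2 is subtracted back out in delta_mat_ast below.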
  {
    loading_par_exp = rep_matrix(0, Ni, v);
    for (i in 1:v) {
      res_cov[i] = cor_errs[i] * sqrt(prod(item_res_vars[error_mat[i, ]]));
      loading_par_exp[error_mat[i, 1], i] = sqrt(
        fabs(cor_errs[i]) * item_res_vars[error_mat[i, 1]]);
      loading_par_exp[error_mat[i, 2], i] = sign(cor_errs[i]) * sqrt(
        fabs(cor_errs[i]) * item_res_vars[error_mat[i, 2]]);
    }
  }
  {
    matrix[Ni, Ni] loading_par_exp_2;
    loading_par_exp_2 = tcrossprod(loading_par_exp);
    // keep marginal residual variances equal to item_res_vars
    delta_mat_ast = diag_matrix(item_res_vars - diagonal(loading_par_exp_2));
    Sigma = lamb_phi_lamb + loading_par_exp_2 + delta_mat_ast;
  }
}
model {
  loadings ~ lognormal(0, sigma_loadings);
  sigma_loadings ~ cauchy(0, 2.5);
  item_res_vars ~ inv_gamma(g_alpha, g_beta);
  phi_mat_chol ~ lkj_corr_cholesky(shape_phi);
  intercepts ~ normal(0, 10);
  cor_errs_01 ~ beta(beta_par, beta_par);
  dat_mv ~ multi_normal(intercepts, Sigma);
}
generated quantities {
  // pointwise log-likelihood for loo
  vector[Np] log_lik;
  for (i in 1:Np) {
    log_lik[i] = multi_normal_lpdf(dat_mv[i] | intercepts, Sigma);
  }
}