Skip to content

Instantly share code, notes, and snippets.

@seantalts
Last active June 21, 2019 21:12
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save seantalts/be40b3186cc8c7fb3f29ea5b754c8bab to your computer and use it in GitHub Desktop.
// Code generated by Stan version 2.19.1
#include <stan/model/model_header.hpp>
namespace hepatitis_model_namespace {
using std::istream;
using std::string;
using std::stringstream;
using std::vector;
using stan::io::dump;
using stan::math::lgamma;
using stan::model::prob_grad;
using namespace stan::math;
static int current_statement_begin__;
// Builds the program_reader that maps generated-statement indices back to
// locations in the original Stan source file; used when rethrowing errors
// so they point at the offending Stan line.
stan::io::program_reader prog_reader__() {
stan::io::program_reader pr;
pr.add_event(0, 0, "start", "../example-models/bugs_examples/vol3/hepatitis/hepatitis.stan");
pr.add_event(70, 68, "end", "../example-models/bugs_examples/vol3/hepatitis/hepatitis.stan");
return pr;
}
class hepatitis_model : public prob_grad {
private:
// Data-block variables, read from the var_context in ctor_body().
int N1;                      // number of observations; checked >= 0
int N;                       // number of entities indexed by idxn1; checked >= 0
std::vector<double> Yvec1;   // observed values, length N1
std::vector<double> tvec1;   // observation times, length N1 (model centers at 6.5)
std::vector<int> idxn1;      // per-observation index into alpha/beta/y0 (1-based); checked >= 0
std::vector<double> y0;      // baseline values, length N
double y0_mean;              // transformed data: mean(y0)
public:
// Construct from a data context with a fixed RNG seed of 0.
hepatitis_model(stan::io::var_context& context__,
std::ostream* pstream__ = 0)
: prob_grad(0) {
ctor_body(context__, 0, pstream__);
}
// Construct from a data context with a caller-supplied RNG seed.
hepatitis_model(stan::io::var_context& context__,
unsigned int random_seed__,
std::ostream* pstream__ = 0)
: prob_grad(0) {
ctor_body(context__, random_seed__, pstream__);
}
// Shared constructor body: reads and validates every data-block variable
// from context__, computes the transformed data (y0_mean), and counts the
// number of unconstrained real parameters into num_params_r__.
// Any failure is rethrown annotated with the Stan source line recorded in
// current_statement_begin__.
void ctor_body(stan::io::var_context& context__,
unsigned int random_seed__,
std::ostream* pstream__) {
typedef double local_scalar_t__;
boost::ecuyer1988 base_rng__ =
stan::services::util::create_rng(random_seed__, 0);
(void) base_rng__; // suppress unused var warning
current_statement_begin__ = -1;
static const char* function__ = "hepatitis_model_namespace::hepatitis_model";
(void) function__; // dummy to suppress unused var warning
size_t pos__;
(void) pos__; // dummy to suppress unused var warning
std::vector<int> vals_i__;
std::vector<double> vals_r__;
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void) DUMMY_VAR__; // suppress unused var warning
try {
// initialize data block variables from context__
current_statement_begin__ = 16;
context__.validate_dims("data initialization", "N1", "int", context__.to_vec());
N1 = int(0);
vals_i__ = context__.vals_i("N1");
pos__ = 0;
N1 = vals_i__[pos__++];
check_greater_or_equal(function__, "N1", N1, 0);
current_statement_begin__ = 17;
context__.validate_dims("data initialization", "N", "int", context__.to_vec());
N = int(0);
vals_i__ = context__.vals_i("N");
pos__ = 0;
N = vals_i__[pos__++];
check_greater_or_equal(function__, "N", N, 0);
current_statement_begin__ = 18;
validate_non_negative_index("Yvec1", "N1", N1);
context__.validate_dims("data initialization", "Yvec1", "double", context__.to_vec(N1));
Yvec1 = std::vector<double>(N1, double(0));
vals_r__ = context__.vals_r("Yvec1");
pos__ = 0;
size_t Yvec1_k_0_max__ = N1;
for (size_t k_0__ = 0; k_0__ < Yvec1_k_0_max__; ++k_0__) {
Yvec1[k_0__] = vals_r__[pos__++];
}
current_statement_begin__ = 19;
validate_non_negative_index("tvec1", "N1", N1);
context__.validate_dims("data initialization", "tvec1", "double", context__.to_vec(N1));
tvec1 = std::vector<double>(N1, double(0));
vals_r__ = context__.vals_r("tvec1");
pos__ = 0;
size_t tvec1_k_0_max__ = N1;
for (size_t k_0__ = 0; k_0__ < tvec1_k_0_max__; ++k_0__) {
tvec1[k_0__] = vals_r__[pos__++];
}
current_statement_begin__ = 20;
validate_non_negative_index("idxn1", "N1", N1);
context__.validate_dims("data initialization", "idxn1", "int", context__.to_vec(N1));
idxn1 = std::vector<int>(N1, int(0));
vals_i__ = context__.vals_i("idxn1");
pos__ = 0;
size_t idxn1_k_0_max__ = N1;
for (size_t k_0__ = 0; k_0__ < idxn1_k_0_max__; ++k_0__) {
idxn1[k_0__] = vals_i__[pos__++];
}
// idxn1 entries are declared lower-bounded at 0 in the Stan model
size_t idxn1_i_0_max__ = N1;
for (size_t i_0__ = 0; i_0__ < idxn1_i_0_max__; ++i_0__) {
check_greater_or_equal(function__, "idxn1[i_0__]", idxn1[i_0__], 0);
}
current_statement_begin__ = 21;
validate_non_negative_index("y0", "N", N);
context__.validate_dims("data initialization", "y0", "double", context__.to_vec(N));
y0 = std::vector<double>(N, double(0));
vals_r__ = context__.vals_r("y0");
pos__ = 0;
size_t y0_k_0_max__ = N;
for (size_t k_0__ = 0; k_0__ < y0_k_0_max__; ++k_0__) {
y0[k_0__] = vals_r__[pos__++];
}
// initialize transformed data variables
current_statement_begin__ = 25;
y0_mean = double(0);
stan::math::fill(y0_mean, DUMMY_VAR__);
// execute transformed data statements
current_statement_begin__ = 26;
stan::math::assign(y0_mean, mean(y0));
// validate transformed data
// validate, set parameter ranges
num_params_r__ = 0U;
param_ranges_i__.clear();
current_statement_begin__ = 30;
num_params_r__ += 1; // sigmasq_y
current_statement_begin__ = 31;
num_params_r__ += 1; // sigmasq_alpha
current_statement_begin__ = 32;
num_params_r__ += 1; // sigmasq_beta
current_statement_begin__ = 33;
validate_non_negative_index("alpha", "N", N);
num_params_r__ += (1 * N); // alpha: real[N]
current_statement_begin__ = 34;
validate_non_negative_index("beta", "N", N);
num_params_r__ += (1 * N); // beta: real[N]
current_statement_begin__ = 35;
num_params_r__ += 1; // gamma
current_statement_begin__ = 36;
num_params_r__ += 1; // alpha0
current_statement_begin__ = 37;
num_params_r__ += 1; // beta0
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, current_statement_begin__, prog_reader__());
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
}
~hepatitis_model() { }
// Reads constrained initial values for every parameter from context__ and
// writes them, transformed to the unconstrained scale used by log_prob,
// into params_r__ (params_i__ stays empty for this model).
// sigmasq_* use the lower-bound-at-0 unconstraining transform; all other
// parameters are unconstrained scalars/arrays. Missing variables and
// failed transforms are rethrown with the Stan source line attached.
void transform_inits(const stan::io::var_context& context__,
std::vector<int>& params_i__,
std::vector<double>& params_r__,
std::ostream* pstream__) const {
typedef double local_scalar_t__;
stan::io::writer<double> writer__(params_r__, params_i__);
size_t pos__;
(void) pos__; // dummy call to supress warning
std::vector<double> vals_r__;
std::vector<int> vals_i__;
current_statement_begin__ = 30;
if (!(context__.contains_r("sigmasq_y")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable sigmasq_y missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("sigmasq_y");
pos__ = 0U;
context__.validate_dims("parameter initialization", "sigmasq_y", "double", context__.to_vec());
double sigmasq_y(0);
sigmasq_y = vals_r__[pos__++];
try {
writer__.scalar_lb_unconstrain(0, sigmasq_y);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable sigmasq_y: ") + e.what()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 31;
if (!(context__.contains_r("sigmasq_alpha")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable sigmasq_alpha missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("sigmasq_alpha");
pos__ = 0U;
context__.validate_dims("parameter initialization", "sigmasq_alpha", "double", context__.to_vec());
double sigmasq_alpha(0);
sigmasq_alpha = vals_r__[pos__++];
try {
writer__.scalar_lb_unconstrain(0, sigmasq_alpha);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable sigmasq_alpha: ") + e.what()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 32;
if (!(context__.contains_r("sigmasq_beta")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable sigmasq_beta missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("sigmasq_beta");
pos__ = 0U;
context__.validate_dims("parameter initialization", "sigmasq_beta", "double", context__.to_vec());
double sigmasq_beta(0);
sigmasq_beta = vals_r__[pos__++];
try {
writer__.scalar_lb_unconstrain(0, sigmasq_beta);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable sigmasq_beta: ") + e.what()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 33;
if (!(context__.contains_r("alpha")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable alpha missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("alpha");
pos__ = 0U;
validate_non_negative_index("alpha", "N", N);
context__.validate_dims("parameter initialization", "alpha", "double", context__.to_vec(N));
std::vector<double> alpha(N, double(0));
size_t alpha_k_0_max__ = N;
for (size_t k_0__ = 0; k_0__ < alpha_k_0_max__; ++k_0__) {
alpha[k_0__] = vals_r__[pos__++];
}
size_t alpha_i_0_max__ = N;
for (size_t i_0__ = 0; i_0__ < alpha_i_0_max__; ++i_0__) {
try {
writer__.scalar_unconstrain(alpha[i_0__]);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable alpha: ") + e.what()), current_statement_begin__, prog_reader__());
}
}
current_statement_begin__ = 34;
if (!(context__.contains_r("beta")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable beta missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("beta");
pos__ = 0U;
validate_non_negative_index("beta", "N", N);
context__.validate_dims("parameter initialization", "beta", "double", context__.to_vec(N));
std::vector<double> beta(N, double(0));
size_t beta_k_0_max__ = N;
for (size_t k_0__ = 0; k_0__ < beta_k_0_max__; ++k_0__) {
beta[k_0__] = vals_r__[pos__++];
}
size_t beta_i_0_max__ = N;
for (size_t i_0__ = 0; i_0__ < beta_i_0_max__; ++i_0__) {
try {
writer__.scalar_unconstrain(beta[i_0__]);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable beta: ") + e.what()), current_statement_begin__, prog_reader__());
}
}
current_statement_begin__ = 35;
if (!(context__.contains_r("gamma")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable gamma missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("gamma");
pos__ = 0U;
context__.validate_dims("parameter initialization", "gamma", "double", context__.to_vec());
double gamma(0);
gamma = vals_r__[pos__++];
try {
writer__.scalar_unconstrain(gamma);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable gamma: ") + e.what()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 36;
if (!(context__.contains_r("alpha0")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable alpha0 missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("alpha0");
pos__ = 0U;
context__.validate_dims("parameter initialization", "alpha0", "double", context__.to_vec());
double alpha0(0);
alpha0 = vals_r__[pos__++];
try {
writer__.scalar_unconstrain(alpha0);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable alpha0: ") + e.what()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 37;
if (!(context__.contains_r("beta0")))
stan::lang::rethrow_located(std::runtime_error(std::string("Variable beta0 missing")), current_statement_begin__, prog_reader__());
vals_r__ = context__.vals_r("beta0");
pos__ = 0U;
context__.validate_dims("parameter initialization", "beta0", "double", context__.to_vec());
double beta0(0);
beta0 = vals_r__[pos__++];
try {
writer__.scalar_unconstrain(beta0);
} catch (const std::exception& e) {
stan::lang::rethrow_located(std::runtime_error(std::string("Error transforming variable beta0: ") + e.what()), current_statement_begin__, prog_reader__());
}
// hand the accumulated unconstrained values back to the caller
params_r__ = writer__.data_r();
params_i__ = writer__.data_i();
}
// Eigen-vector adapter: delegates to the std::vector overload above and
// copies the resulting unconstrained values into the caller's Eigen vector.
void transform_inits(const stan::io::var_context& context,
Eigen::Matrix<double, Eigen::Dynamic, 1>& params_r,
std::ostream* pstream__) const {
std::vector<double> r_vec;
std::vector<int> i_vec;
transform_inits(context, i_vec, r_vec, pstream__);
params_r.resize(r_vec.size());
for (size_t j = 0; j < r_vec.size(); ++j)
params_r(j) = r_vec[j];
}
// Evaluates the model's log density on the unconstrained scale.
// propto__ drops normalizing constants; jacobian__ adds the log-Jacobian of
// the constraining transforms to lp__ (needed for sampling, not for
// optimization on the constrained scale).
// Model: m[n] = alpha[idxn1[n]] + beta[idxn1[n]]*(tvec1[n]-6.5)
//             + gamma*(y0[idxn1[n]] - y0_mean);  Yvec1 ~ normal(m, sigma_y),
// with normal priors on alpha/beta and inv-gamma priors on the variances.
template <bool propto__, bool jacobian__, typename T__>
T__ log_prob(std::vector<T__>& params_r__,
std::vector<int>& params_i__,
std::ostream* pstream__ = 0) const {
typedef T__ local_scalar_t__;
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void) DUMMY_VAR__; // dummy to suppress unused var warning
T__ lp__(0.0);
stan::math::accumulator<T__> lp_accum__;
try {
stan::io::reader<local_scalar_t__> in__(params_r__, params_i__);
// model parameters
current_statement_begin__ = 30;
local_scalar_t__ sigmasq_y;
(void) sigmasq_y; // dummy to suppress unused var warning
if (jacobian__)
sigmasq_y = in__.scalar_lb_constrain(0, lp__);
else
sigmasq_y = in__.scalar_lb_constrain(0);
current_statement_begin__ = 31;
local_scalar_t__ sigmasq_alpha;
(void) sigmasq_alpha; // dummy to suppress unused var warning
if (jacobian__)
sigmasq_alpha = in__.scalar_lb_constrain(0, lp__);
else
sigmasq_alpha = in__.scalar_lb_constrain(0);
current_statement_begin__ = 32;
local_scalar_t__ sigmasq_beta;
(void) sigmasq_beta; // dummy to suppress unused var warning
if (jacobian__)
sigmasq_beta = in__.scalar_lb_constrain(0, lp__);
else
sigmasq_beta = in__.scalar_lb_constrain(0);
current_statement_begin__ = 33;
std::vector<local_scalar_t__> alpha;
size_t alpha_d_0_max__ = N;
alpha.reserve(alpha_d_0_max__);
for (size_t d_0__ = 0; d_0__ < alpha_d_0_max__; ++d_0__) {
if (jacobian__)
alpha.push_back(in__.scalar_constrain(lp__));
else
alpha.push_back(in__.scalar_constrain());
}
current_statement_begin__ = 34;
std::vector<local_scalar_t__> beta;
size_t beta_d_0_max__ = N;
beta.reserve(beta_d_0_max__);
for (size_t d_0__ = 0; d_0__ < beta_d_0_max__; ++d_0__) {
if (jacobian__)
beta.push_back(in__.scalar_constrain(lp__));
else
beta.push_back(in__.scalar_constrain());
}
current_statement_begin__ = 35;
local_scalar_t__ gamma;
(void) gamma; // dummy to suppress unused var warning
if (jacobian__)
gamma = in__.scalar_constrain(lp__);
else
gamma = in__.scalar_constrain();
current_statement_begin__ = 36;
local_scalar_t__ alpha0;
(void) alpha0; // dummy to suppress unused var warning
if (jacobian__)
alpha0 = in__.scalar_constrain(lp__);
else
alpha0 = in__.scalar_constrain();
current_statement_begin__ = 37;
local_scalar_t__ beta0;
(void) beta0; // dummy to suppress unused var warning
if (jacobian__)
beta0 = in__.scalar_constrain(lp__);
else
beta0 = in__.scalar_constrain();
// transformed parameters
current_statement_begin__ = 42;
local_scalar_t__ sigma_y;
(void) sigma_y; // dummy to suppress unused var warning
stan::math::initialize(sigma_y, DUMMY_VAR__);
stan::math::fill(sigma_y, DUMMY_VAR__);
current_statement_begin__ = 43;
local_scalar_t__ sigma_alpha;
(void) sigma_alpha; // dummy to suppress unused var warning
stan::math::initialize(sigma_alpha, DUMMY_VAR__);
stan::math::fill(sigma_alpha, DUMMY_VAR__);
current_statement_begin__ = 44;
local_scalar_t__ sigma_beta;
(void) sigma_beta; // dummy to suppress unused var warning
stan::math::initialize(sigma_beta, DUMMY_VAR__);
stan::math::fill(sigma_beta, DUMMY_VAR__);
// transformed parameters block statements
// sigma_* are the standard deviations derived from the variance parameters
current_statement_begin__ = 45;
stan::math::assign(sigma_y, stan::math::sqrt(sigmasq_y));
current_statement_begin__ = 46;
stan::math::assign(sigma_alpha, stan::math::sqrt(sigmasq_alpha));
current_statement_begin__ = 47;
stan::math::assign(sigma_beta, stan::math::sqrt(sigmasq_beta));
// validate transformed parameters
const char* function__ = "validate transformed params";
(void) function__; // dummy to suppress unused var warning
current_statement_begin__ = 42;
if (stan::math::is_uninitialized(sigma_y)) {
std::stringstream msg__;
msg__ << "Undefined transformed parameter: sigma_y";
stan::lang::rethrow_located(std::runtime_error(std::string("Error initializing variable sigma_y: ") + msg__.str()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 43;
if (stan::math::is_uninitialized(sigma_alpha)) {
std::stringstream msg__;
msg__ << "Undefined transformed parameter: sigma_alpha";
stan::lang::rethrow_located(std::runtime_error(std::string("Error initializing variable sigma_alpha: ") + msg__.str()), current_statement_begin__, prog_reader__());
}
current_statement_begin__ = 44;
if (stan::math::is_uninitialized(sigma_beta)) {
std::stringstream msg__;
msg__ << "Undefined transformed parameter: sigma_beta";
stan::lang::rethrow_located(std::runtime_error(std::string("Error initializing variable sigma_beta: ") + msg__.str()), current_statement_begin__, prog_reader__());
}
// model body
{
current_statement_begin__ = 51;
int oldn(0);
(void) oldn; // dummy to suppress unused var warning
stan::math::fill(oldn, std::numeric_limits<int>::min());
current_statement_begin__ = 52;
validate_non_negative_index("m", "N1", N1);
std::vector<local_scalar_t__ > m(N1, local_scalar_t__(DUMMY_VAR__));
stan::math::initialize(m, DUMMY_VAR__);
stan::math::fill(m, DUMMY_VAR__);
// build the per-observation mean vector m
current_statement_begin__ = 53;
for (int n = 1; n <= N1; ++n) {
current_statement_begin__ = 54;
stan::math::assign(oldn, get_base1(idxn1, n, "idxn1", 1));
current_statement_begin__ = 55;
stan::model::assign(m,
stan::model::cons_list(stan::model::index_uni(n), stan::model::nil_index_list()),
((get_base1(alpha, oldn, "alpha", 1) + (get_base1(beta, oldn, "beta", 1) * (get_base1(tvec1, n, "tvec1", 1) - 6.5))) + (gamma * (get_base1(y0, oldn, "y0", 1) - y0_mean))),
"assigning variable m");
}
// likelihood
current_statement_begin__ = 57;
lp_accum__.add(normal_log<propto__>(Yvec1, m, sigma_y));
// hierarchical priors on the per-entity intercepts and slopes
current_statement_begin__ = 59;
lp_accum__.add(normal_log<propto__>(alpha, alpha0, sigma_alpha));
current_statement_begin__ = 60;
lp_accum__.add(normal_log<propto__>(beta, beta0, sigma_beta));
// hyperpriors
current_statement_begin__ = 62;
lp_accum__.add(inv_gamma_log<propto__>(sigmasq_y, .001, .001));
current_statement_begin__ = 63;
lp_accum__.add(inv_gamma_log<propto__>(sigmasq_alpha, .001, .001));
current_statement_begin__ = 64;
lp_accum__.add(inv_gamma_log<propto__>(sigmasq_beta, .001, .001));
current_statement_begin__ = 65;
lp_accum__.add(normal_log<propto__>(alpha0, 0, 1000));
current_statement_begin__ = 66;
lp_accum__.add(normal_log<propto__>(beta0, 0, 1000));
current_statement_begin__ = 67;
lp_accum__.add(normal_log<propto__>(gamma, 0, 1000));
}
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, current_statement_begin__, prog_reader__());
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
lp_accum__.add(lp__);
return lp_accum__.sum();
} // log_prob()
// Eigen-vector adapter: copies the parameter vector into a std::vector and
// forwards to the std::vector log_prob overload.
template <bool propto, bool jacobian, typename T_>
T_ log_prob(Eigen::Matrix<T_,Eigen::Dynamic,1>& params_r,
std::ostream* pstream = 0) const {
std::vector<T_> r_vec;
r_vec.reserve(params_r.size());
for (int j = 0; j < params_r.size(); ++j)
r_vec.push_back(params_r(j));
std::vector<int> i_vec;
return log_prob<propto,jacobian,T_>(r_vec, i_vec, pstream);
}
// Lists the model's output names: parameters in declaration order followed
// by the transformed parameters.
void get_param_names(std::vector<std::string>& names__) const {
names__.assign({"sigmasq_y",
"sigmasq_alpha",
"sigmasq_beta",
"alpha",
"beta",
"gamma",
"alpha0",
"beta0",
"sigma_y",
"sigma_alpha",
"sigma_beta"});
}
// Dimensions for each name reported by get_param_names, in the same order:
// scalars get an empty dims vector; alpha and beta are length-N arrays.
void get_dims(std::vector<std::vector<size_t> >& dimss__) const {
const std::vector<size_t> scalar_dims;
const std::vector<size_t> length_n_dims(1, static_cast<size_t>(N));
dimss__.clear();
dimss__.push_back(scalar_dims);    // sigmasq_y
dimss__.push_back(scalar_dims);    // sigmasq_alpha
dimss__.push_back(scalar_dims);    // sigmasq_beta
dimss__.push_back(length_n_dims);  // alpha
dimss__.push_back(length_n_dims);  // beta
dimss__.push_back(scalar_dims);    // gamma
dimss__.push_back(scalar_dims);    // alpha0
dimss__.push_back(scalar_dims);    // beta0
dimss__.push_back(scalar_dims);    // sigma_y
dimss__.push_back(scalar_dims);    // sigma_alpha
dimss__.push_back(scalar_dims);    // sigma_beta
}
// Reads one unconstrained draw from params_r__, constrains each parameter,
// and appends the constrained values to vars__ in get_param_names order.
// Transformed parameters (sigma_*) are appended only when
// include_tparams__ is true; this model has no generated quantities, so
// include_gqs__ only controls early returns.
template <typename RNG>
void write_array(RNG& base_rng__,
std::vector<double>& params_r__,
std::vector<int>& params_i__,
std::vector<double>& vars__,
bool include_tparams__ = true,
bool include_gqs__ = true,
std::ostream* pstream__ = 0) const {
typedef double local_scalar_t__;
vars__.resize(0);
stan::io::reader<local_scalar_t__> in__(params_r__, params_i__);
static const char* function__ = "hepatitis_model_namespace::write_array";
(void) function__; // dummy to suppress unused var warning
// read-transform, write parameters
double sigmasq_y = in__.scalar_lb_constrain(0);
vars__.push_back(sigmasq_y);
double sigmasq_alpha = in__.scalar_lb_constrain(0);
vars__.push_back(sigmasq_alpha);
double sigmasq_beta = in__.scalar_lb_constrain(0);
vars__.push_back(sigmasq_beta);
std::vector<double> alpha;
size_t alpha_d_0_max__ = N;
alpha.reserve(alpha_d_0_max__);
for (size_t d_0__ = 0; d_0__ < alpha_d_0_max__; ++d_0__) {
alpha.push_back(in__.scalar_constrain());
}
size_t alpha_k_0_max__ = N;
for (size_t k_0__ = 0; k_0__ < alpha_k_0_max__; ++k_0__) {
vars__.push_back(alpha[k_0__]);
}
std::vector<double> beta;
size_t beta_d_0_max__ = N;
beta.reserve(beta_d_0_max__);
for (size_t d_0__ = 0; d_0__ < beta_d_0_max__; ++d_0__) {
beta.push_back(in__.scalar_constrain());
}
size_t beta_k_0_max__ = N;
for (size_t k_0__ = 0; k_0__ < beta_k_0_max__; ++k_0__) {
vars__.push_back(beta[k_0__]);
}
double gamma = in__.scalar_constrain();
vars__.push_back(gamma);
double alpha0 = in__.scalar_constrain();
vars__.push_back(alpha0);
double beta0 = in__.scalar_constrain();
vars__.push_back(beta0);
double lp__ = 0.0;
(void) lp__; // dummy to suppress unused var warning
stan::math::accumulator<double> lp_accum__;
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void) DUMMY_VAR__; // suppress unused var warning
if (!include_tparams__ && !include_gqs__) return;
try {
// declare and define transformed parameters
current_statement_begin__ = 42;
double sigma_y;
(void) sigma_y; // dummy to suppress unused var warning
stan::math::initialize(sigma_y, DUMMY_VAR__);
stan::math::fill(sigma_y, DUMMY_VAR__);
current_statement_begin__ = 43;
double sigma_alpha;
(void) sigma_alpha; // dummy to suppress unused var warning
stan::math::initialize(sigma_alpha, DUMMY_VAR__);
stan::math::fill(sigma_alpha, DUMMY_VAR__);
current_statement_begin__ = 44;
double sigma_beta;
(void) sigma_beta; // dummy to suppress unused var warning
stan::math::initialize(sigma_beta, DUMMY_VAR__);
stan::math::fill(sigma_beta, DUMMY_VAR__);
// do transformed parameters statements
current_statement_begin__ = 45;
stan::math::assign(sigma_y, stan::math::sqrt(sigmasq_y));
current_statement_begin__ = 46;
stan::math::assign(sigma_alpha, stan::math::sqrt(sigmasq_alpha));
current_statement_begin__ = 47;
stan::math::assign(sigma_beta, stan::math::sqrt(sigmasq_beta));
if (!include_gqs__ && !include_tparams__) return;
// validate transformed parameters
const char* function__ = "validate transformed params";
(void) function__; // dummy to suppress unused var warning
// write transformed parameters
if (include_tparams__) {
vars__.push_back(sigma_y);
vars__.push_back(sigma_alpha);
vars__.push_back(sigma_beta);
}
if (!include_gqs__) return;
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, current_statement_begin__, prog_reader__());
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
}
template <typename RNG>
void write_array(RNG& base_rng,
Eigen::Matrix<double,Eigen::Dynamic,1>& params_r,
Eigen::Matrix<double,Eigen::Dynamic,1>& vars,
bool include_tparams = true,
bool include_gqs = true,
std::ostream* pstream = 0) const {
std::vector<double> params_r_vec(params_r.size());
for (int i = 0; i < params_r.size(); ++i)
params_r_vec[i] = params_r(i);
std::vector<double> vars_vec;
std::vector<int> params_i_vec;
write_array(base_rng, params_r_vec, params_i_vec, vars_vec, include_tparams, include_gqs, pstream);
vars.resize(vars_vec.size());
for (int i = 0; i < vars.size(); ++i)
vars(i) = vars_vec[i];
}
// Name of the Stan model this class was generated from.
static std::string model_name() {
return std::string("hepatitis_model");
}
// Flattened names of the constrained outputs, one per scalar, in the same
// order write_array emits values. Array entries use "name.i" with a
// 1-based index. Transformed parameters are appended only when requested.
void constrained_param_names(std::vector<std::string>& param_names__,
bool include_tparams__ = true,
bool include_gqs__ = true) const {
auto push_scalar = [&param_names__](const char* base) {
param_names__.push_back(std::string(base));
};
auto push_array = [&param_names__](const char* base, int len) {
for (int i = 1; i <= len; ++i) {
std::stringstream ss;
ss << base << '.' << i;
param_names__.push_back(ss.str());
}
};
push_scalar("sigmasq_y");
push_scalar("sigmasq_alpha");
push_scalar("sigmasq_beta");
push_array("alpha", N);
push_array("beta", N);
push_scalar("gamma");
push_scalar("alpha0");
push_scalar("beta0");
if (!include_gqs__ && !include_tparams__) return;
if (include_tparams__) {
push_scalar("sigma_y");
push_scalar("sigma_alpha");
push_scalar("sigma_beta");
}
if (!include_gqs__) return;
}
// Flattened names of the unconstrained parameters. For this model every
// parameter is a scalar (or scalar array) with a one-to-one unconstrained
// counterpart, so the listing matches constrained_param_names.
void unconstrained_param_names(std::vector<std::string>& param_names__,
bool include_tparams__ = true,
bool include_gqs__ = true) const {
auto push_scalar = [&param_names__](const char* base) {
param_names__.push_back(std::string(base));
};
auto push_array = [&param_names__](const char* base, int len) {
for (int i = 1; i <= len; ++i) {
std::stringstream ss;
ss << base << '.' << i;
param_names__.push_back(ss.str());
}
};
push_scalar("sigmasq_y");
push_scalar("sigmasq_alpha");
push_scalar("sigmasq_beta");
push_array("alpha", N);
push_array("beta", N);
push_scalar("gamma");
push_scalar("alpha0");
push_scalar("beta0");
if (!include_gqs__ && !include_tparams__) return;
if (include_tparams__) {
push_scalar("sigma_y");
push_scalar("sigma_alpha");
push_scalar("sigma_beta");
}
if (!include_gqs__) return;
}
}; // model
} // namespace
typedef hepatitis_model_namespace::hepatitis_model stan_model;
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment