Skip to content

Instantly share code, notes, and snippets.

@bbbales2
Last active April 19, 2017 17:12
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save bbbales2/e351f1b3815f3cc875a36b39d5ef69ea to your computer and use it in GitHub Desktop.
Glenn's stuff
// Adjusted version of stan-generated cpp code for the calculate_r() function.
//
// Computes the vector r (values only, as doubles) together with its analytic
// Jacobian g_r with respect to lambda, then hands both to Stan's reverse-mode
// autodiff via stored_gradient_vari, so the backward pass uses the
// precomputed gradients instead of taping the forward computation.
//
// @param lambda     parameter vector (autodiff vars) to differentiate w.r.t.
// @param ...        remaining arguments are data passed through to the model
//                   computation (selection indices, design matrices, etc.).
// @param pstream__  Stan's output stream for print statements (unused here).
// @return vector of vars whose values are r and whose gradients w.r.t.
//         lambda are taken from g_r (g_r(i, k) = d r(i) / d lambda(k)).
inline Eigen::Matrix<var, Eigen::Dynamic, 1> calculate_r(
    const Eigen::Matrix<var, Eigen::Dynamic, 1>& lambda,
    const std::vector<std::vector<int> >& lambda_select,
    const Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& M,
    const std::vector<std::vector<std::vector<int> > >& ht_select,
    const std::vector<std::vector<std::vector<std::vector<int> > > >& ht_gmap,
    const std::vector<std::vector<int> >& Ht,
    const Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& Z,
    const std::vector<int>& Z_type,
    const Eigen::Matrix<double, Eigen::Dynamic, 1>& Zdvec,
    const std::vector<std::vector<int> >& Zdmat_segments,
    std::ostream* pstream__) {
  // Main code body copied from stan-generated cpp code.
  // In addition to the original return vector r, it is updated to produce a
  // matrix g_r containing the derivative of each element of r with respect
  // to each element of lambda.
  // Placeholders: substitute the actual double-only model computation here.
  Eigen::Matrix<double, Eigen::Dynamic, 1> r = MAGICALLY_COMPUTE_R();
  Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> g_r
      = MAGICALLY_COMPUTE_G_R();

  // Collect the vari pointers of the operands we differentiate against.
  // NOTE(review): assumes lambda holds all parameters being sampled on —
  // confirm against the model.  The array is allocated in Stan's autodiff
  // arena, so it is released together with the stack (no manual free).
  vari** operands
      = ChainableStack::memalloc_.alloc_array<vari*>(lambda.rows());
  for (int i = 0; i < lambda.rows(); ++i)
    operands[i] = lambda(i).vi_;

  // The output is shaped like the plain-double result; each element is a var
  // carrying its value plus precomputed gradient information.
  Eigen::Matrix<var, Eigen::Dynamic, 1> output(r.rows());
  for (int i = 0; i < r.rows(); ++i) {
    // Each output element needs its own arena-allocated gradient row:
    // stored_gradient_vari keeps the pointer, it does not copy the values.
    double* gradients
        = ChainableStack::memalloc_.alloc_array<double>(lambda.rows());
    for (int k = 0; k < lambda.rows(); ++k) {
      gradients[k] = g_r(i, k);
    }
    // stored_gradient_vari arguments: value of the variable, number of
    // params differentiated with respect to, pointers to those params,
    // and a pointer to the gradient values.
    output(i) = var(
        new stored_gradient_vari(r(i), lambda.rows(), operands, gradients));
  }
  return output;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment