1#ifndef STAN_MATH_REV_FUNCTOR_GRADIENT_HPP
2#define STAN_MATH_REV_FUNCTOR_GRADIENT_HPP
47 const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
49 Eigen::Matrix<double, Eigen::Dynamic, 1>& grad_fx) {
52 Eigen::Matrix<var, Eigen::Dynamic, 1> x_var(x);
53 var fx_var = f(x_var);
55 grad_fx.resize(x.size());
57 grad_fx = x_var.adj();
102template <
typename F,
typename EigVec,
typename InputIt,
104inline void gradient(
const F& f,
const EigVec& x,
double& fx,
105 InputIt first_grad_fx, InputIt last_grad_fx) {
108 if (last_grad_fx - first_grad_fx != x.size()) {
110 s <<
"gradient(): iterator and gradient different sizes; iterator size = "
111 << last_grad_fx - first_grad_fx <<
"; grad size = " << x.size()
113 throw std::invalid_argument(s.str());
116 Eigen::Matrix<var, Eigen::Dynamic, 1> x_var(x);
117 var fx_var = f(x_var);
120 for (Eigen::VectorXd::Index i = 0; i < x_var.size(); ++i) {
121 *first_grad_fx++ = x_var.coeff(i).adj();
A class following the RAII idiom to start and recover nested autodiff scopes.
require_t< container_type_check_base< is_eigen_vector, value_type_t, TypeCheck, Check... > > require_eigen_vector_vt
Requires that the type satisfies is_eigen_vector.
void gradient(const F &f, const Eigen::Matrix< T, Eigen::Dynamic, 1 > &x, T &fx, Eigen::Matrix< T, Eigen::Dynamic, 1 > &grad_fx)
Calculate the value and the gradient of the specified function at the specified argument.
static void grad()
Compute the gradient for all variables starting from the end of the AD tape.
The lgamma implementation in stan-math is based on either the reentrant safe lgamma_r implementation ...