#ifndef STAN_MATH_REV_FUNCTOR_GRADIENT_HPP
#define STAN_MATH_REV_FUNCTOR_GRADIENT_HPP

#include <stan/math/rev/core.hpp>
#include <stan/math/rev/meta.hpp>
#include <Eigen/Dense>
#include <sstream>
#include <stdexcept>

namespace stan {
namespace math {

// Calculate the value and the gradient of the specified function at the
// specified argument.
template <typename F>
void gradient(const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
              double& fx, Eigen::Matrix<double, Eigen::Dynamic, 1>& grad_fx) {
  nested_rev_autodiff nested;  // RAII: start and recover a nested AD scope
  Eigen::Matrix<var, Eigen::Dynamic, 1> x_var(x);
  var fx_var = f(x_var);
  fx = fx_var.val();
  grad_fx.resize(x.size());
  grad(fx_var.vi_);  // propagate adjoints from the end of the AD tape
  grad_fx = x_var.adj();
}
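/*
 * Usage sketch (illustrative addition, not part of the original header):
 * computing the value and gradient of f(x) = x'x at x = (1, 2, 3) with the
 * overload above.  The functor must be callable with a vector of var, so a
 * generic lambda is used here; dot_self(v) is Stan Math's v'v.
 *
 *   auto f = [](const auto& v) { return stan::math::dot_self(v); };
 *   Eigen::VectorXd x(3);
 *   x << 1, 2, 3;
 *   double fx;
 *   Eigen::VectorXd grad_fx;
 *   stan::math::gradient(f, x, fx, grad_fx);
 *   // fx == 14; grad_fx == (2, 4, 6), the gradient 2 * x
 */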
// Calculate the value and the gradient of the specified function at the
// specified argument, writing each partial derivative through an output
// iterator; throws std::invalid_argument if the iterator range and the
// input vector have different sizes.
template <typename F, typename EigVec, typename InputIt,
          // require EigVec to be an Eigen vector with an arithmetic value type
          require_eigen_vector_vt<std::is_arithmetic, EigVec>* = nullptr>
void gradient(const F& f, const EigVec& x, double& fx, InputIt first_grad_fx,
              InputIt last_grad_fx) {
  nested_rev_autodiff nested;  // RAII: start and recover a nested AD scope

  if (last_grad_fx - first_grad_fx != x.size()) {
    std::stringstream s;
    s << "gradient(): iterator and gradient different sizes; iterator size = "
      << last_grad_fx - first_grad_fx << "; grad size = " << x.size()
      << std::endl;
    throw std::invalid_argument(s.str());
  }

  Eigen::Matrix<var, Eigen::Dynamic, 1> x_var(x);
  var fx_var = f(x_var);
  fx = fx_var.val();
  grad(fx_var.vi_);  // propagate adjoints from the end of the AD tape
  for (Eigen::VectorXd::Index i = 0; i < x_var.size(); ++i) {
    *first_grad_fx++ = x_var.coeff(i).adj();
  }
}
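/*
 * Usage sketch (illustrative addition, not part of the original header):
 * the iterator overload above writes the partials through an output
 * iterator, e.g. into a std::vector<double>, reusing f and x from the
 * sketch after the first overload.
 *
 *   std::vector<double> grad_fx(x.size());
 *   double fx;
 *   stan::math::gradient(f, x, fx, grad_fx.begin(), grad_fx.end());
 *   // throws std::invalid_argument if grad_fx.size() != x.size()
 */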
}  // namespace math
}  // namespace stan
#endif