#ifndef STAN_MATH_MIX_FUNCTOR_GRAD_HESSIAN_HPP
#define STAN_MATH_MIX_FUNCTOR_GRAD_HESSIAN_HPP

#include <stan/math/mix/meta.hpp>
#include <stan/math/fwd/core.hpp>
#include <stan/math/rev/core.hpp>
#include <stan/math/prim/fun/Eigen.hpp>
#include <vector>

namespace stan {
namespace math {

/**
 * Calculate the value, the Hessian, and the gradient of the Hessian
 * of the specified function at the specified argument.
 */
template <typename F>
void grad_hessian(
    const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x, double& fx,
    Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& H,
    std::vector<Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic> >&
        grad_H) {
  using Eigen::Dynamic;
  using Eigen::Matrix;
  fx = f(x);
  int d = x.size();
  H.resize(d, d);
  grad_H.resize(d, Matrix<double, Dynamic, Dynamic>(d, d));
  for (int i = 0; i < d; ++i) {
    for (int j = i; j < d; ++j) {
      // RAII scope: starts a nested reverse-mode tape, recovered on exit
      nested_rev_autodiff nested;
      // seed the two forward-mode tangents along directions i and j
      Matrix<fvar<fvar<var> >, Dynamic, 1> x_ffvar(d);
      for (int k = 0; k < d; ++k) {
        x_ffvar(k) = fvar<fvar<var> >(fvar<var>(x(k), i == k),
                                      fvar<var>(j == k, 0));
      }
      fvar<fvar<var> > fx_ffvar = f(x_ffvar);
      // the second-order tangent d_.d_ holds the Hessian entry (i, j)
      H(i, j) = fx_ffvar.d_.d_.val();
      H(j, i) = H(i, j);
      // reverse pass from the Hessian entry propagates its gradient
      grad(fx_ffvar.d_.d_.vi_);
      for (int k = 0; k < d; ++k) {
        grad_H[i](j, k) = x_ffvar(k).val_.val_.adj();
        grad_H[j](i, k) = grad_H[i](j, k);
      }
    }
  }
}

}  // namespace math
}  // namespace stan
#endif
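A minimal usage sketch, separate from the header above: the functor fun and its cubic polynomial are hypothetical, and the umbrella include <stan/math/mix.hpp> is assumed to pull in grad_hessian. The functor must expose a templated operator() so it can also be called with fvar<fvar<var> > arguments.

#include <stan/math/mix.hpp>
#include <iostream>
#include <vector>

// Hypothetical test function: f(x) = x0^3 * x1 + x0 * x1^2
struct fun {
  template <typename T>
  T operator()(const Eigen::Matrix<T, Eigen::Dynamic, 1>& x) const {
    return x(0) * x(0) * x(0) * x(1) + x(0) * x(1) * x(1);
  }
};

int main() {
  Eigen::VectorXd x(2);
  x << 1.0, 2.0;
  double fx;                             // f(x)
  Eigen::MatrixXd H;                     // d^2 f / dx_i dx_j
  std::vector<Eigen::MatrixXd> grad_H;   // grad_H[i](j, k) = d^3 f / dx_i dx_j dx_k
  stan::math::grad_hessian(fun(), x, fx, H, grad_H);
  std::cout << "f(x) = " << fx << "\nH =\n" << H << "\n";
  return 0;
}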