#ifndef STAN_MATH_MIX_FUNCTOR_HESSIAN_HPP
#define STAN_MATH_MIX_FUNCTOR_HESSIAN_HPP

#include <stan/math/fwd/core.hpp>
#include <stan/math/rev/core.hpp>
#include <stan/math/prim/fun/Eigen.hpp>

namespace stan {
namespace math {

// Calculate the value, the gradient, and the Hessian of the specified
// function at the specified argument, using forward-over-reverse mode.
template <typename F>
void hessian(const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
             double& fx, Eigen::Matrix<double, Eigen::Dynamic, 1>& grad,
             Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& H) {
  H.resize(x.size(), x.size());
  grad.resize(x.size());
  // fx must be computed even when x has size zero
  if (x.size() == 0) {
    fx = f(x);
    return;
  }
  for (int i = 0; i < x.size(); ++i) {
    // RAII scope: start and recover a nested reverse-mode autodiff tape
    nested_rev_autodiff nested;
    Eigen::Matrix<fvar<var>, Eigen::Dynamic, 1> x_fvar(x.size());
    for (int j = 0; j < x.size(); ++j) {
      x_fvar(j) = fvar<var>(x(j), i == j);
    }
    fvar<var> fx_fvar = f(x_fvar);
    grad(i) = fx_fvar.d_.val();
    if (i == 0) {
      fx = fx_fvar.val_.val();
    }
    stan::math::grad(fx_fvar.d_.vi_);
    for (int j = 0; j < x.size(); ++j) {
      H(i, j) = x_fvar(j).val_.adj();
    }
  }
}

}  // namespace math
}  // namespace stan
#endif
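As a quick illustration of how this functor is typically called, here is a minimal usage sketch. The quad_form_functor name, the test point, and the expected outputs are illustrative assumptions, not taken from the library; the sketch assumes the top-level <stan/math/mix.hpp> header is available.

#include <stan/math/mix.hpp>
#include <Eigen/Dense>
#include <iostream>

// Illustrative functor: f(x) = 0.5 * (x0^2 + 2 * x1^2), so the gradient
// should be (x0, 2 * x1) and the Hessian diag(1, 2).
struct quad_form_functor {
  template <typename T>
  T operator()(const Eigen::Matrix<T, Eigen::Dynamic, 1>& x) const {
    return 0.5 * (x(0) * x(0) + 2.0 * x(1) * x(1));
  }
};

int main() {
  Eigen::VectorXd x(2);
  x << 1.0, 3.0;
  double fx;
  Eigen::VectorXd grad;
  Eigen::MatrixXd H;
  stan::math::hessian(quad_form_functor(), x, fx, grad, H);
  std::cout << "fx = " << fx << "\n"                    // 9.5
            << "grad = " << grad.transpose() << "\n"    // 1 6
            << "H =\n" << H << "\n";                    // diag(1, 2)
  return 0;
}

The functor only needs a templated operator() accepting an Eigen column vector, because hessian instantiates it with both double (for the size-zero case) and fvar<var> scalars.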