Automatic Differentiation
 
Loading...
Searching...
No Matches
hessian.hpp
Go to the documentation of this file.
1#ifndef STAN_MATH_MIX_FUNCTOR_HESSIAN_HPP
2#define STAN_MATH_MIX_FUNCTOR_HESSIAN_HPP
3
7#include <stdexcept>
8
9namespace stan {
10namespace math {
11
41template <typename F>
42void hessian(const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
43 double& fx, Eigen::Matrix<double, Eigen::Dynamic, 1>& grad,
44 Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& H) {
45 H.resize(x.size(), x.size());
46 grad.resize(x.size());
47
48 // need to compute fx even with size = 0
49 if (x.size() == 0) {
50 fx = f(x);
51 return;
52 }
53 for (int i = 0; i < x.size(); ++i) {
54 // Run nested autodiff in this scope
56
57 Eigen::Matrix<fvar<var>, Eigen::Dynamic, 1> x_fvar(x.size());
58 for (int j = 0; j < x.size(); ++j) {
59 x_fvar(j) = fvar<var>(x(j), i == j);
60 }
61 fvar<var> fx_fvar = f(x_fvar);
62 grad(i) = fx_fvar.d_.val();
63 if (i == 0) {
64 fx = fx_fvar.val_.val();
65 }
66 stan::math::grad(fx_fvar.d_.vi_);
67 for (int j = 0; j < x.size(); ++j) {
68 H(i, j) = x_fvar(j).val_.adj();
69 }
70 }
71}
72
73} // namespace math
74} // namespace stan
75#endif
A class following the RAII idiom to start and recover nested autodiff scopes.
void hessian(const F &f, const Eigen::Matrix< T, Eigen::Dynamic, 1 > &x, T &fx, Eigen::Matrix< T, Eigen::Dynamic, 1 > &grad, Eigen::Matrix< T, Eigen::Dynamic, Eigen::Dynamic > &H)
Calculate the value, the gradient, and the Hessian of the specified function at the specified argument.
Definition hessian.hpp:41
static void grad()
Compute the gradient for all variables starting from the end of the AD tape.
Definition grad.hpp:26
The lgamma implementation in stan-math is based on either the reentrant safe lgamma_r implementation ...
Scalar val_
The value of this variable.
Definition fvar.hpp:49
Scalar d_
The tangent (derivative) of this variable.
Definition fvar.hpp:61
This template class represents scalars used in forward-mode automatic differentiation,...
Definition fvar.hpp:40