Automatic Differentiation
 
grad_tr_mat_times_hessian.hpp
Go to the documentation of this file.
#ifndef STAN_MATH_MIX_FUNCTOR_GRAD_TR_MAT_TIMES_HESSIAN_HPP
#define STAN_MATH_MIX_FUNCTOR_GRAD_TR_MAT_TIMES_HESSIAN_HPP

#include <stan/math/mix/meta.hpp>
#include <stan/math/mix/functor/gradient_dot_vector.hpp>
#include <stan/math/prim/fun/Eigen.hpp>
#include <stan/math/rev/core.hpp>
#include <stdexcept>
#include <vector>

namespace stan {
namespace math {

template <typename F>
void grad_tr_mat_times_hessian(
    const F& f, const Eigen::Matrix<double, Eigen::Dynamic, 1>& x,
    const Eigen::Matrix<double, Eigen::Dynamic, Eigen::Dynamic>& M,
    Eigen::Matrix<double, Eigen::Dynamic, 1>& grad_tr_MH) {
  using Eigen::Dynamic;
  using Eigen::Matrix;

  // Run nested autodiff in this scope
  nested_rev_autodiff nested;

  grad_tr_MH.resize(x.size());

  // Copy x into reverse-mode autodiff variables
  Matrix<var, Dynamic, 1> x_var(x.size());
  for (int i = 0; i < x.size(); ++i) {
    x_var(i) = x(i);
  }

  Matrix<fvar<var>, Dynamic, 1> x_fvar(x.size());

  var sum(0.0);
  Matrix<double, Dynamic, 1> M_n(x.size());
  for (int n = 0; n < x.size(); ++n) {
    // n-th row of M
    for (int k = 0; k < x.size(); ++k) {
      M_n(k) = M(n, k);
    }
    // Forward-mode tangent set to the unit vector e_n
    for (int k = 0; k < x.size(); ++k) {
      x_fvar(k) = fvar<var>(x_var(k), k == n);
    }
    fvar<var> fx;
    fvar<var> grad_fx_dot_v;
    gradient_dot_vector<fvar<var>, double>(f, x_fvar, M_n, fx, grad_fx_dot_v);
    // Tangent (d_) is e_n' * H(x) * M_n; summed over n this gives
    // tr(M * H(x)) for the (symmetric) Hessian H of f
    sum += grad_fx_dot_v.d_;
  }

  // Reverse pass: gradient of tr(M * H(x)) with respect to x
  grad(sum.vi_);
  for (int i = 0; i < x.size(); ++i) {
    grad_tr_MH(i) = x_var(i).adj();
  }
}

}  // namespace math
}  // namespace stan
#endif
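
As a usage sketch (not part of this header): grad_tr_mat_times_hessian evaluates the gradient with respect to x of tr(M * H(x)), where H(x) is the Hessian of the functor f at x. The functor below, cube_first_coord, is a made-up example chosen so the result is easy to check by hand: f(x) = x(0)^3 gives H(0,0) = 6 * x(0) and zeros elsewhere, so with M = I the expected gradient is (6, 0, 0).

#include <stan/math/mix.hpp>
#include <stan/math/mix/functor/grad_tr_mat_times_hessian.hpp>
#include <iostream>

// Hypothetical test functor (illustration only): f(x) = x(0)^3
struct cube_first_coord {
  template <typename T>
  T operator()(const Eigen::Matrix<T, Eigen::Dynamic, 1>& x) const {
    return x(0) * x(0) * x(0);
  }
};

int main() {
  Eigen::VectorXd x(3);
  x << 1, 2, 3;
  Eigen::MatrixXd M = Eigen::MatrixXd::Identity(3, 3);
  Eigen::VectorXd grad_tr_MH;
  stan::math::grad_tr_mat_times_hessian(cube_first_coord(), x, M, grad_tr_MH);
  // tr(M * H(x)) = 6 * x(0), so grad_tr_MH should print as 6 0 0
  std::cout << grad_tr_MH.transpose() << std::endl;
  return 0;
}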