Automatic Differentiation
 
softmax.hpp
#ifndef STAN_MATH_FWD_FUN_SOFTMAX_HPP
#define STAN_MATH_FWD_FUN_SOFTMAX_HPP

// Headers for the helpers used below (require_* traits, fvar, Eigen types,
// the prim-mode softmax, to_ref, and value_of); the exact include list in
// the upstream header may differ.
#include <stan/math/prim/meta.hpp>
#include <stan/math/fwd/core.hpp>
#include <stan/math/prim/fun/Eigen.hpp>
#include <stan/math/prim/fun/softmax.hpp>
#include <stan/math/prim/fun/to_ref.hpp>
#include <stan/math/prim/fun/value_of.hpp>
namespace stan {
namespace math {

template <typename ColVec,
          require_eigen_col_vector_vt<is_fvar, ColVec>* = nullptr>
inline auto softmax(const ColVec& alpha) {
  using Eigen::Dynamic;
  using Eigen::Matrix;
  using T = typename value_type_t<ColVec>::Scalar;
  if (alpha.size() == 0) {
    return Matrix<fvar<T>, Dynamic, 1>();
  }
  const auto& alpha_ref = to_ref(alpha);

  // Values: run the primitive (double-based) softmax on the values of alpha.
  Matrix<T, Dynamic, 1> softmax_alpha_t = softmax(value_of(alpha_ref));

  Matrix<fvar<T>, Dynamic, 1> softmax_alpha(alpha.size());
  for (int k = 0; k < alpha.size(); ++k) {
    softmax_alpha.coeffRef(k).val_ = softmax_alpha_t.coeff(k);
    softmax_alpha.coeffRef(k).d_ = 0;
  }

  // Tangents: accumulate the Jacobian-vector product
  //   d_k = sum_m softmax_k * (delta_{km} - softmax_m) * alpha_m.d_
  for (int m = 0; m < alpha.size(); ++m) {
    T negative_alpha_m_d_times_softmax_alpha_t_m
        = -alpha_ref.coeff(m).d_ * softmax_alpha_t.coeff(m);
    for (int k = 0; k < alpha.size(); ++k) {
      if (m == k) {
        softmax_alpha.coeffRef(k).d_
            += softmax_alpha_t.coeff(k)
               * (alpha_ref.coeff(m).d_
                  + negative_alpha_m_d_times_softmax_alpha_t_m);
      } else {
        softmax_alpha.coeffRef(k).d_
            += softmax_alpha_t.coeff(k)
               * negative_alpha_m_d_times_softmax_alpha_t_m;
      }
    }
  }

  return softmax_alpha;
}

}  // namespace math
}  // namespace stan
#endif
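
For reference (not part of the header), the nested loops above implement the forward-mode Jacobian-vector product of softmax. Writing \(\sigma = \mathrm{softmax}(\alpha)\) for the values and \(\dot\alpha\) for the input tangents stored in d_:

\[
\frac{\partial \sigma_k}{\partial \alpha_m} = \sigma_k\,(\delta_{km} - \sigma_m),
\qquad
\dot\sigma_k = \sum_m \frac{\partial \sigma_k}{\partial \alpha_m}\,\dot\alpha_m
             = \sigma_k\Bigl(\dot\alpha_k - \sum_m \sigma_m\,\dot\alpha_m\Bigr).
\]

The m == k branch contributes the diagonal term \(\sigma_k\,\dot\alpha_k\,(1 - \sigma_k)\), and the other branch contributes the off-diagonal terms \(-\sigma_k\,\sigma_m\,\dot\alpha_m\).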
Referenced symbols (generated cross-references)
- value_type_t: typename value_type<T>::type — helper for accessing the underlying type.
- softmax(const ColVec& alpha) — the overload defined in this file (softmax.hpp).
- value_of(const fvar<T>& v) — returns the value of the specified variable; defined in value_of.hpp, line 18.
- to_ref(T&& a) -> ref_type_t<T&&> — evaluates expensive Eigen expressions; defined in to_ref.hpp, line 17.
- "The lgamma implementation in stan-math is based on either the reentrant safe lgamma_r implementation ..." (definition: fvar.hpp, line 9)
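
A minimal usage sketch, not part of the header: it assumes Stan Math (with its bundled Eigen) is on the include path and uses the forward-mode entry point <stan/math/fwd.hpp>. Seeding the tangent of one input component recovers the corresponding column of the softmax Jacobian in the d_ fields of the result.

#include <stan/math/fwd.hpp>
#include <iostream>

int main() {
  using stan::math::fvar;
  // Seed the tangent of alpha(0); each result's d_ then holds
  // d softmax(alpha)_k / d alpha(0).
  Eigen::Matrix<fvar<double>, Eigen::Dynamic, 1> alpha(3);
  alpha << fvar<double>(0.5, 1.0), fvar<double>(1.0, 0.0), fvar<double>(-0.5, 0.0);

  Eigen::Matrix<fvar<double>, Eigen::Dynamic, 1> s = stan::math::softmax(alpha);
  for (int k = 0; k < s.size(); ++k) {
    std::cout << "softmax_" << k << " = " << s(k).val_
              << "  d/d alpha_0 = " << s(k).d_ << "\n";
  }
  return 0;
}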