#ifndef STAN_MATH_REV_CONSTRAINT_STOCHASTIC_ROW_CONSTRAIN_HPP
#define STAN_MATH_REV_CONSTRAINT_STOCHASTIC_ROW_CONSTRAIN_HPP

#include <stan/math/rev/core.hpp>
#include <stan/math/rev/meta.hpp>
#include <stan/math/prim/constraint/stochastic_row_constrain.hpp>
#include <stan/math/prim/fun/Eigen.hpp>

namespace stan {
namespace math {

/** Return a row stochastic matrix. */
template <typename T, require_rev_matrix_t<T>* = nullptr>
inline plain_type_t<T> stochastic_row_constrain(const T& y) {
  using ret_type = plain_type_t<T>;
  const auto N = y.rows();
  const auto M = y.cols();
  arena_t<T> arena_y = y;
  // ... (elided: arena_x holds the row-stochastic values computed from arena_y) ...
  reverse_pass_callback([arena_y, arena_x]() mutable {
    const auto N = arena_y.rows();
    auto&& x_val = arena_x.val_op();
    auto&& x_adj = arena_x.adj_op();
    Eigen::VectorXd x_pre_softmax_adj(x_val.cols());
    for (Eigen::Index i = 0; i < N; ++i) {
      // Adjoint of row i's softmax input.
      x_pre_softmax_adj.noalias()
          = -x_val.row(i) * x_adj.row(i).dot(x_val.row(i))
            + x_val.row(i).cwiseProduct(x_adj.row(i));
      // ... (elided: propagate x_pre_softmax_adj through the row's
      //      sum-to-zero transform into arena_y.adj().row(i)) ...
    }
  });
  return ret_type(arena_x);
}
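/* A note on the update in the loop above (explanatory only; this is standard
 * softmax calculus, nothing specific to this file): for a row
 *   x = softmax(z),   dx_j / dz_k = x_j * (delta_jk - x_k),
 * so the adjoint of the softmax input is
 *   z_adj = x .* x_adj - dot(x, x_adj) * x,
 * which is exactly what
 *   x_val.row(i).cwiseProduct(x_adj.row(i))
 *       - x_val.row(i) * x_adj.row(i).dot(x_val.row(i))
 * evaluates for each row, written with the negated term first.
 */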

/** Return a row stochastic matrix and increment `lp` with the log absolute
 * Jacobian determinant of the transform. */
template <typename T, require_rev_matrix_t<T>* = nullptr>
inline plain_type_t<T> stochastic_row_constrain(const T& y,
                                                scalar_type_t<T>& lp) {
  const auto N = y.rows();
  const auto M = y.cols();
  // ... (elided: arena copies of y and the constrained values, as above) ...
  reverse_pass_callback([arena_y, arena_x, lp]() mutable {
    const auto N = arena_y.rows();
    auto&& x_val = arena_x.val_op();
    auto&& x_adj = arena_x.adj_op();
    const auto x_val_cols = x_val.cols();
    Eigen::VectorXd x_pre_softmax_adj(x_val.cols());
    for (Eigen::Index i = 0; i < N; ++i) {
      // Adjoint of row i's softmax input, with lp's adjoint folded in.
      x_pre_softmax_adj.noalias()
          = -x_val.row(i)
                * (x_adj.row(i).dot(x_val.row(i)) + lp.adj() * x_val_cols)
            + (x_val.row(i).cwiseProduct(x_adj.row(i)).array() + lp.adj())
                  .matrix();
      // ... (elided: backprop into arena_y.adj().row(i), as above) ...
    }
  });
  // ... (elided: return of the constrained matrix, as above) ...
}

}  // namespace math
}  // namespace stan

#endif
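/* Usage sketch (illustrative only, not part of this header): a standalone
 * program showing how the overloads above are typically exercised. The 3 x 4
 * input size and the use of sum(x) as a toy objective are assumptions made
 * for this example.
 */
#include <stan/math/rev.hpp>

#include <iostream>

int main() {
  using stan::math::var;
  Eigen::MatrixXd y_free = Eigen::MatrixXd::Random(3, 4);  // unconstrained values
  Eigen::Matrix<var, Eigen::Dynamic, Eigen::Dynamic> y = y_free.cast<var>();
  var lp = 0;
  // Jacobian-adjusted overload: lp is incremented on the forward pass.
  auto x = stan::math::stochastic_row_constrain(y, lp);
  var target = lp + stan::math::sum(x);
  target.grad();  // reverse pass runs the callback registered above
  Eigen::MatrixXd x_val = stan::math::value_of(x);
  std::cout << "row sums (each should be 1):\n"
            << x_val.rowwise().sum() << "\n";
  std::cout << "d target / d y(0,0) = " << y(0, 0).adj() << "\n";
  stan::math::recover_memory();
  return 0;
}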