Struct GumbelSoftmaxFuncOptions#
Defined in File activation.h
Struct Documentation#
- struct GumbelSoftmaxFuncOptions#
Options for torch::nn::functional::gumbel_softmax.

Example:
namespace F = torch::nn::functional;
F::gumbel_softmax(logits, F::GumbelSoftmaxFuncOptions().hard(true).dim(-1));
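The sketch below expands the example into a self-contained program; the tensor name and shape are illustrative assumptions, not part of the API.

#include <torch/torch.h>
#include <iostream>

namespace F = torch::nn::functional;

int main() {
  // Hypothetical input: unnormalized log-probabilities for 4 samples over 10 categories.
  torch::Tensor logits = torch::randn({4, 10});

  // Draw Gumbel-Softmax samples at temperature 0.5, discretized to one-hot
  // vectors in the forward pass but differentiated as soft samples in autograd.
  torch::Tensor samples = F::gumbel_softmax(
      logits,
      F::GumbelSoftmaxFuncOptions().tau(0.5).hard(true).dim(-1));

  std::cout << samples.sizes() << std::endl;  // [4, 10]
  return 0;
}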
Public Functions
- inline auto tau(const double &new_tau) -> decltype(*this)#
non-negative scalar temperature
- inline auto tau(double &&new_tau) -> decltype(*this)#
- inline const double &tau() const noexcept#
- inline double &tau() noexcept#
- inline auto hard(const bool &new_hard) -> decltype(*this)#
if true, the returned samples will be discretized as one-hot vectors, but will be differentiated as if they were the soft samples in autograd. Default: false
- inline auto hard(bool &&new_hard) -> decltype(*this)#
- inline const bool &hard() const noexcept#
- inline bool &hard() noexcept#
- inline auto dim(const int &new_dim) -> decltype(*this)#
dimension along which softmax will be computed. Default: -1
- inline auto dim(int &&new_dim) -> decltype(*this)#
- inline const int &dim() const noexcept#
- inline int &dim() noexcept#
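As a minimal sketch (variable names are illustrative), the setters above return *this, so an options object can be built by chaining or mutated step by step, and the getters read the stored values back:

#include <torch/torch.h>
#include <iostream>

namespace F = torch::nn::functional;

int main() {
  // Build the options incrementally; each setter returns *this.
  F::GumbelSoftmaxFuncOptions opts;
  opts.tau(2.0).hard(false).dim(-1);

  // Read the stored values back through the getters.
  std::cout << opts.tau() << " "        // 2
            << opts.hard() << " "       // 0 (false)
            << opts.dim() << std::endl; // -1

  // Reuse the same options object in a call.
  torch::Tensor y = F::gumbel_softmax(torch::randn({3, 5}), opts);
  std::cout << y.sizes() << std::endl;  // [3, 5]
  return 0;
}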