Struct TransformerDecoderLayerOptions¶
Defined in File transformerlayer.h
Page Contents
Struct Documentation¶
-
struct TransformerDecoderLayerOptions¶
Options for the
TransformerDecoderLayer
module. Example:
TransformerDecoderLayer model(TransformerDecoderLayerOptions(512, 8).dropout(0.2));
Public Functions
-
TransformerDecoderLayerOptions(int64_t d_model, int64_t nhead)¶
-
inline auto d_model(const int64_t &new_d_model) -> decltype(*this)¶
number of expected features in the input
-
inline auto d_model(int64_t &&new_d_model) -> decltype(*this)¶
-
inline const int64_t &d_model() const noexcept¶
-
inline int64_t &d_model() noexcept¶
-
inline auto nhead(const int64_t &new_nhead) -> decltype(*this)¶
number of heads in the multiheadattention models
-
inline auto nhead(int64_t &&new_nhead) -> decltype(*this)¶
-
inline const int64_t &nhead() const noexcept¶
-
inline int64_t &nhead() noexcept¶
-
inline auto dim_feedforward(const int64_t &new_dim_feedforward) -> decltype(*this)¶
dimension of the feedforward network model. Default: 2048
-
inline auto dim_feedforward(int64_t &&new_dim_feedforward) -> decltype(*this)¶
-
inline const int64_t &dim_feedforward() const noexcept¶
-
inline int64_t &dim_feedforward() noexcept¶
-
inline auto dropout(const double &new_dropout) -> decltype(*this)¶
the dropout value. Default: 0.1
-
inline auto dropout(double &&new_dropout) -> decltype(*this)¶
-
inline const double &dropout() const noexcept¶
-
inline double &dropout() noexcept¶
-
inline auto activation(const activation_t &new_activation) -> decltype(*this)¶
the activation function of the intermediate layer; can be
torch::kGELU, torch::kReLU, or a unary callable. Default:
torch::kReLU
-
inline auto activation(activation_t &&new_activation) -> decltype(*this)¶
-
inline const activation_t &activation() const noexcept¶
-
inline activation_t &activation() noexcept¶
-
TransformerDecoderLayerOptions(int64_t d_model, int64_t nhead)¶