Safe Haskell | Safe-Inferred
---|---
Language | Haskell2010
Documentation
data Softmax (selectDim :: SelectDim (By Symbol Nat)) where Source #
Softmax is a non-linear activation function; it normalizes its input along the selected dimension so that the entries along that dimension form a probability distribution. A usage sketch follows the instances below.

Constructors

Softmax
  softmaxSelectDim :: SSelectDim selectDim — the dimension along which the softmax is computed
Instances
Generic (Softmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  from :: Softmax selectDim -> Rep (Softmax selectDim) x Source #
  to :: Rep (Softmax selectDim) x -> Softmax selectDim Source #
HasStateDict (Softmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec (Softmax selectDim) -> StateDictKey -> m (Softmax selectDim) Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> Softmax selectDim -> m () Source #
HasInitialize (Softmax selectDim) generatorDevice (Softmax selectDim) generatorDevice Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec (Softmax selectDim) -> Generator generatorDevice -> m (Softmax selectDim, Generator generatorDevice) Source #
(shape' ~ SoftmaxF selectDim shape, Catch shape', output ~ Tensor requiresGradient layout device dataType shape') => HasForward (Softmax selectDim) (Tensor requiresGradient layout device dataType shape) generator output generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  forward :: MonadThrow m => Softmax selectDim -> Tensor requiresGradient layout device dataType shape -> Generator generator -> m (output, Generator generator) Source #
type Rep (Softmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  type Rep (Softmax selectDim) = D1 ('MetaData "Softmax" "Torch.GraduallyTyped.NN.Activation" "hasktorch-gradually-typed-0.2.0.0-1KV1aIPzzbp6JpSr37tC1K" 'False) (C1 ('MetaCons "Softmax" 'PrefixI 'True) (S1 ('MetaSel ('Just "softmaxSelectDim") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (SSelectDim selectDim))))
type ModelSpec (Softmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
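A minimal GHCi sketch (assuming the umbrella module Torch.GraduallyTyped re-exports this module together with the tensor helpers sOnes and TensorSpec and the generator helper sMkGenerator; those names come from the wider hasktorch-gradually-typed API and are assumptions here, not part of this module):

>>> :set -XDataKinds -XTypeApplications
>>> import Torch.GraduallyTyped
>>> g <- sMkGenerator (SDevice SCPU) 0
>>> spec = TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"batch" :&: SSize @2 :|: SName @"feature" :&: SSize @3 :|: SNil)
>>> input <- sOnes spec
>>> (output, g') <- forward (Softmax (SSelectDim (SByIndex @1))) input g

Since every input entry is 1, each row of output is the uniform distribution (1/3, 1/3, 1/3): softmax makes the entries along the selected dimension sum to one.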
data LogSoftmax (selectDim :: SelectDim (By Symbol Nat)) where Source #
LogSoftmax is a non-linear activation function; it computes the logarithm of the softmax along the selected dimension in one numerically stable step. A usage sketch follows the instances below.

Constructors

LogSoftmax
  logSoftmaxSelectDim :: SSelectDim selectDim — the dimension along which the log-softmax is computed
Instances
Generic (LogSoftmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  from :: LogSoftmax selectDim -> Rep (LogSoftmax selectDim) x Source #
  to :: Rep (LogSoftmax selectDim) x -> LogSoftmax selectDim Source #
HasStateDict (LogSoftmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec (LogSoftmax selectDim) -> StateDictKey -> m (LogSoftmax selectDim) Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> LogSoftmax selectDim -> m () Source #
HasInitialize (LogSoftmax selectDim) generatorDevice (LogSoftmax selectDim) generatorDevice Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec (LogSoftmax selectDim) -> Generator generatorDevice -> m (LogSoftmax selectDim, Generator generatorDevice) Source #
(shape' ~ SoftmaxF selectDim shape, Catch shape', output ~ Tensor requiresGradient layout device dataType shape') => HasForward (LogSoftmax selectDim) (Tensor requiresGradient layout device dataType shape) generator output generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  forward :: MonadThrow m => LogSoftmax selectDim -> Tensor requiresGradient layout device dataType shape -> Generator generator -> m (output, Generator generator) Source #
type Rep (LogSoftmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  type Rep (LogSoftmax selectDim) = D1 ('MetaData "LogSoftmax" "Torch.GraduallyTyped.NN.Activation" "hasktorch-gradually-typed-0.2.0.0-1KV1aIPzzbp6JpSr37tC1K" 'False) (C1 ('MetaCons "LogSoftmax" 'PrefixI 'True) (S1 ('MetaSel ('Just "logSoftmaxSelectDim") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (SSelectDim selectDim))))
type ModelSpec (LogSoftmax selectDim) Source #
  Defined in Torch.GraduallyTyped.NN.Activation
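The initialize path looks like this (a sketch with the same imports and extensions as the Softmax example; it additionally assumes that the ModelSpec of a parameterless activation is the activation value itself, which the collapsed ModelSpec instance above does not show explicitly):

>>> g <- sMkGenerator (SDevice SCPU) 0
>>> (logSoftmax, g') <- initialize (LogSoftmax (SSelectDim (SByIndex @1))) g
>>> input <- sOnes $ TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"batch" :&: SSize @2 :|: SName @"feature" :&: SSize @3 :|: SNil)
>>> (output, g'') <- forward logSoftmax input g'

Every entry of output is log(1/3) ≈ -1.0986; fusing the logarithm with the softmax avoids the overflow and underflow that computing them separately can cause.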
data Relu where Source #

Relu is a piecewise linear activation function: it passes positive inputs through unchanged and maps non-positive inputs to zero. A usage sketch follows the instances below.

Constructors

Relu :: Relu
Instances
Generic Relu Source #
Show Relu Source #
Eq Relu Source #
Ord Relu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
HasStateDict Relu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec Relu -> StateDictKey -> m Relu Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> Relu -> m () Source #
HasInitialize Relu generatorDevice Relu generatorDevice Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec Relu -> Generator generatorDevice -> m (Relu, Generator generatorDevice) Source #
HasForward Relu (Tensor requiresGradient layout device dataType shape) generator (Tensor requiresGradient layout device dataType shape) generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
type Rep Relu Source #
type ModelSpec Relu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
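Because Relu carries no parameters, the unit constructor can be passed to forward directly; the HasForward instance shows that the output tensor has exactly the input tensor's type, so the shape is preserved (a sketch with the same assumed helpers as in the Softmax example):

>>> g <- sMkGenerator (SDevice SCPU) 0
>>> input <- sOnes $ TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"*" :&: SSize @4 :|: SNil)
>>> (output, g') <- forward Relu input g

On an all-ones input, output is again all ones; the generator is threaded through unchanged because relu is deterministic.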
data Gelu where Source #

Gelu is a non-linear activation function: gelu(x) = x · Φ(x), where Φ is the cumulative distribution function of the standard normal distribution. A usage sketch follows the instances below.

Constructors

Gelu :: Gelu
Instances
Generic Gelu Source #
Show Gelu Source #
Eq Gelu Source #
Ord Gelu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
HasStateDict Gelu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec Gelu -> StateDictKey -> m Gelu Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> Gelu -> m () Source #
HasInitialize Gelu generatorDevice Gelu generatorDevice Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec Gelu -> Generator generatorDevice -> m (Gelu, Generator generatorDevice) Source #
HasForward Gelu (Tensor requiresGradient layout device dataType shape) generator (Tensor requiresGradient layout device dataType shape) generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
type Rep Gelu Source #
type ModelSpec Gelu Source #
  Defined in Torch.GraduallyTyped.NN.Activation
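Usage mirrors Relu (a sketch with the same assumed helpers):

>>> g <- sMkGenerator (SDevice SCPU) 0
>>> input <- sOnes $ TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"*" :&: SSize @4 :|: SNil)
>>> (output, g') <- forward Gelu input g

Each output entry is gelu(1) = 1 · Φ(1) ≈ 0.8413. Unlike relu, gelu is smooth everywhere, which can improve gradient flow near zero.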
data GeluNew where Source #

GeluNew is a non-linear activation function; it is commonly implemented as the tanh approximation of Gelu, geluNew(x) = 0.5 · x · (1 + tanh(√(2/π) · (x + 0.044715 · x³))). A usage sketch follows the instances below.

Constructors

GeluNew :: GeluNew

Instances
Generic GeluNew Source #
Show GeluNew Source #
Eq GeluNew Source #
Ord GeluNew Source #
  Defined in Torch.GraduallyTyped.NN.Activation
HasStateDict GeluNew Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec GeluNew -> StateDictKey -> m GeluNew Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> GeluNew -> m () Source #
HasInitialize GeluNew generator GeluNew generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec GeluNew -> Generator generator -> m (GeluNew, Generator generator) Source #
HasForward GeluNew (Tensor requiresGradient layout device dataType shape) generator (Tensor requiresGradient layout device dataType shape) generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
type Rep GeluNew Source #
type ModelSpec GeluNew Source #
  Defined in Torch.GraduallyTyped.NN.Activation
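A sketch for GeluNew, identical in shape to the Gelu example (same assumed helpers):

>>> g <- sMkGenerator (SDevice SCPU) 0
>>> input <- sOnes $ TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"*" :&: SSize @4 :|: SNil)
>>> (output, g') <- forward GeluNew input g

On input 1 the tanh approximation gives 0.5 · (1 + tanh(√(2/π) · 1.044715)) ≈ 0.8412, within about 2·10⁻⁴ of exact gelu.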
data Tanh where Source #

Tanh is a non-linear activation function that maps each input into the interval (-1, 1). A usage sketch follows the instances below.

Constructors

Tanh :: Tanh
Instances
Generic Tanh Source #
Show Tanh Source #
Eq Tanh Source #
Ord Tanh Source #
  Defined in Torch.GraduallyTyped.NN.Activation
HasStateDict Tanh Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  fromStateDict :: (MonadIO m, MonadThrow m, MonadState StateDict m) => ModelSpec Tanh -> StateDictKey -> m Tanh Source #
  toStateDict :: (MonadThrow m, MonadState StateDict m) => StateDictKey -> Tanh -> m () Source #
HasInitialize Tanh generator Tanh generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
  initialize :: MonadThrow m => ModelSpec Tanh -> Generator generator -> m (Tanh, Generator generator) Source #
HasForward Tanh (Tensor requiresGradient layout device dataType shape) generator (Tensor requiresGradient layout device dataType shape) generator Source #
  Defined in Torch.GraduallyTyped.NN.Activation
type Rep Tanh Source #
type ModelSpec Tanh Source #
  Defined in Torch.GraduallyTyped.NN.Activation
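Finally, a sketch for Tanh, which also applies elementwise and preserves the tensor type (same assumed helpers):

>>> g <- sMkGenerator (SDevice SCPU) 0
>>> input <- sOnes $ TensorSpec (SGradient SWithoutGradient) (SLayout SDense) (SDevice SCPU) (SDataType SFloat) (SShape $ SName @"*" :&: SSize @4 :|: SNil)
>>> (output, g') <- forward Tanh input g

Each output entry is tanh 1 ≈ 0.7616. Since all the activations in this module share the same HasForward shape, they can be swapped for one another in a larger model without changing any surrounding types.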