hasktorch-gradually-typed-0.2.0.0: experimental project for hasktorch
Safe Haskell: Safe-Inferred
Language: Haskell2010

Torch.GraduallyTyped.NN.Functional.Normalization

Synopsis

Documentation

type family LayerNormImplF (reverseNormalizedDims :: [Dim (Name Symbol) (Size Nat)]) (reverseInputDims :: [Dim (Name Symbol) (Size Nat)]) :: [Dim (Name Symbol) (Size Nat)] where ... Source #

Equations

LayerNormImplF '[] reverseInputDims = reverseInputDims 
LayerNormImplF (normalizedDim ': reverseNormalizedDims) (inputDim ': reverseInputDims) = (normalizedDim <+> inputDim) ': LayerNormImplF reverseNormalizedDims reverseInputDims 
LayerNormImplF _ '[] = TypeError LayerNormShapeErrorMessage 

type LayerNormShapeErrorMessage = "Cannot apply the layer norm. " % "The normalized shape exceeds the input shape." Source #

type family LayerNormWithBiasF (weightShape :: Shape [Dim (Name Symbol) (Size Nat)]) (biasShape :: Shape [Dim (Name Symbol) (Size Nat)]) (inputShape :: Shape [Dim (Name Symbol) (Size Nat)]) :: Shape [Dim (Name Symbol) (Size Nat)] where ... Source #

layerNormWithBias Source #

Arguments

:: forall gradient gradient' gradient'' layout layout' layout'' device device' device'' dataType dataType' dataType'' shape shape' shape''. SGetShape shape 
=> Tensor gradient layout device dataType shape

weight

-> Tensor gradient' layout' device' dataType' shape'

bias

-> Double

eps

-> Tensor gradient'' layout'' device'' dataType'' shape''

input

-> Tensor (gradient <|> (gradient' <|> gradient'')) (layout <+> (layout' <+> layout'')) (device <+> (device' <+> device'')) (dataType <+> (dataType' <+> dataType'')) (LayerNormWithBiasF shape shape' shape'')

output

type family LayerNormWithoutBiasF (weightShape :: Shape [Dim (Name Symbol) (Size Nat)]) (inputShape :: Shape [Dim (Name Symbol) (Size Nat)]) :: Shape [Dim (Name Symbol) (Size Nat)] where ... Source #

type family LayerNormWithoutBiasBysF (weightDims :: [Dim (Name Symbol) (Size Nat)]) (inputDims :: [Dim (Name Symbol) (Size Nat)]) (inputDimsLength :: Nat) (counter :: Nat) :: [By Symbol Nat] where ... Source #

Equations

LayerNormWithoutBiasBysF '[] _ _ _ = '[] 
LayerNormWithoutBiasBysF (_ ': weightDims) (_ ': inputDims) inputDimsLength counter = 'ByIndex (inputDimsLength - counter) ': LayerNormWithoutBiasBysF weightDims inputDims inputDimsLength (counter + 1) 
LayerNormWithoutBiasBysF _ '[] inputDimsLength counter = TypeError ("Cannot apply the layer norm." % ("The provided weight tensor has more dimensions than the input tensor," % ("" % (((" '" <> counter) <> "'") % ("" % ("and" % ("" % (((" '" <> inputDimsLength) <> "',") % ("" % "respectively."))))))))) 

layerNormWithoutBias Source #

Arguments

:: forall gradient layout device dataType shape gradient' layout' device' dataType' shape'. (SGetShape shape, SGetShape shape') 
=> Tensor gradient layout device dataType shape

weight

-> Double

eps

-> Tensor gradient' layout' device' dataType' shape'

input

-> Tensor (gradient <|> gradient') (layout <+> layout') (device <+> device') (dataType <+> dataType') (LayerNormWithoutBiasF shape shape')

output

T5-style layer norm