hasktorch-gradually-typed-0.2.0.0: experimental project for hasktorch
Safe Haskell: Safe-Inferred
Language: Haskell2010

Torch.GraduallyTyped.NN.Transformer.T5.Large

Synopsis

Documentation

type T5LargeNumLayers = 24 Source #

T5-Large number of layers. 'num_layers = 24'

t5LargeNumLayers :: SNat T5LargeNumLayers Source #

T5-Large number of layers singleton.

type T5LargeHeadDim = 'Dim ('Name "*") ('Size 16) Source #

T5-Large number of attention heads. 'n_heads = 16'

type T5LargeHeadEmbedDim = 'Dim ('Name "*") ('Size 64) Source #

T5-Large head embedding dimension. 'd_kv = 64'

type T5LargeEmbedDim = 'Dim ('Name "*") ('Size 1024) Source #

T5-Large embedding dimension. 'inner_dim = n_heads * d_kv = 1024'

type T5LargeInputEmbedDim = 'Dim ('Name "*") ('Size 1024) Source #

T5-Large model dimension. 'd_model = 1024'

type T5LargeFFNDim = 'Dim ('Name "*") ('Size 4096) Source #

T5-Large feed-forward network dimension. 'd_ff = 4096'

type T5LargeVocabDim = 'Dim ('Name "*") ('Size 32128) Source #

T5-Large vocabulary dimension. 'vocab_size = 32128'

type T5Large (transformerHead :: TransformerHead) (gradient :: Gradient RequiresGradient) (device :: Device (DeviceType Nat)) (hasDropout :: HasDropout) = T5ModelF 'T5 transformerHead T5LargeNumLayers T5LargeNumLayers gradient device T5LargeHeadDim T5LargeHeadEmbedDim T5LargeEmbedDim T5LargeInputEmbedDim T5LargeFFNDim T5LargeVocabDim hasDropout Source #

T5-Large model.

t5LargeSpec :: STransformerHead transformerHead -> SGradient gradient -> SDevice device -> SHasDropout hasDropout -> ModelSpec (T5Large transformerHead gradient device hasDropout) Source #

T5-Large model specification.