hasktorch-gradually-typed-0.2.0.0: experimental project for hasktorch
Safe Haskell: Safe-Inferred
Language: Haskell2010

Torch.GraduallyTyped.Optim

Synopsis

Documentation

data AdamOptions Source #

Options for the Adam optimizer.

Constructors

AdamOptions 

Fields

defaultAdamOptions :: AdamOptions Source #

Default Adam options.
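For example, a run will typically start from the defaults and override individual settings with record update syntax. This is a minimal sketch; the field name learningRate is an assumption, since the record fields are not listed on this page:

  -- Sketch: start from the defaults and lower the learning rate.
  -- The field name 'learningRate' is assumed here; check the AdamOptions
  -- record for the actual field names.
  myAdamOptions :: AdamOptions
  myAdamOptions = defaultAdamOptions {learningRate = 1e-4}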

data Optimizer model where Source #

Optimizer data type.

Constructors

UnsafeOptimizer 

Fields

getStateDict :: forall model. Optimizer model -> IO StateDict Source #

Get the model state dictionary from an optimizer.

getModel :: forall model. HasStateDict model => ModelSpec model -> Optimizer model -> IO model Source #

Extract a model from an optimizer, given the model's specification.
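As a sketch of how the two accessors combine, the following reads both the model value and its raw parameter tensors back out of an optimizer, for instance before writing a checkpoint. It assumes only the signatures shown above:

  -- Sketch: recover the model and its parameters from an optimizer.
  inspectOptimizer ::
    HasStateDict model =>
    ModelSpec model ->
    Optimizer model ->
    IO (model, StateDict)
  inspectOptimizer modelSpec optim = do
    model <- getModel modelSpec optim  -- rebuild the model value
    stateDict <- getStateDict optim    -- parameter tensors keyed by name
    pure (model, stateDict)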

mkAdam Source #

Arguments

:: forall model. HasStateDict model 
=> AdamOptions

Adam options

-> model

initial model

-> IO (Optimizer model)

Adam optimizer

Create a new Adam optimizer from a model.
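A minimal sketch of setting up the optimizer, assuming only the signature above; initialModel stands for a model value constructed elsewhere:

  -- Sketch: wrap an initialised model in an Adam optimizer with default options.
  setupAdam :: HasStateDict model => model -> IO (Optimizer model)
  setupAdam initialModel = mkAdam defaultAdamOptions initialModel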

stepWithGenerator Source #

Arguments

:: forall model generatorDevice lossGradient lossLayout lossDataType lossDevice lossShape generatorOutputDevice. (HasStateDict model, SGetGeneratorDevice generatorDevice, SGetGeneratorDevice generatorOutputDevice, Catch (lossShape <+> 'Shape '[]), Catch (lossGradient <+> 'Gradient 'WithGradient)) 
=> Optimizer model

optimizer for the model

-> ModelSpec model

model specification

-> (model -> Generator generatorDevice -> IO (Tensor lossGradient lossLayout lossDataType lossDevice lossShape, Generator generatorOutputDevice))

loss function to minimize

-> Generator generatorDevice

random generator

-> IO (Tensor lossGradient lossLayout lossDataType lossDevice lossShape, Generator generatorOutputDevice)

loss and updated generator

Perform one step of optimization: evaluate the loss function on the current model, backpropagate, and update the parameters held by the optimizer.
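A hedged sketch of a training loop built on stepWithGenerator. The loss function myLoss is a placeholder that would close over a data batch, and the loop assumes the generator comes back on the same device it went in on, so it can be fed into the next step; the constraints from the signature above must also hold:

  -- Sketch: run n optimization steps.
  -- 'myLoss' is a placeholder loss function of the shape expected above.
  trainLoop n optim modelSpec myLoss gen
    | n <= (0 :: Int) = pure gen
    | otherwise = do
        (_loss, gen') <- stepWithGenerator optim modelSpec myLoss gen
        -- inspect or log '_loss' here
        trainLoop (n - 1) optim modelSpec myLoss gen'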