| Safe Haskell | None |
|---|---|
| Language | Haskell2010 |
TensorFlow.Minimize
Synopsis
- type Minimizer a = forall m. MonadBuild m => [Variable a] -> [Tensor Value a] -> m ControlNode
- minimizeWith :: (MonadBuild m, GradientCompatible a) => Minimizer a -> Tensor v a -> [Variable a] -> m ControlNode
- gradientDescent :: GradientCompatible a => a -> Minimizer a
- type OneOfAdamDataTypes t = OneOf '[Complex Double, Complex Float, Int16, Int32, Int64, Int8, Word16, Word32, Word64, Word8, Double, Float] t
- data AdamConfig t = AdamConfig {
- adamLearningRate :: t
- adamBeta1 :: t
- adamBeta2 :: t
- adamEpsilon :: t
- adam :: (OneOfAdamDataTypes t, Fractional t) => Minimizer t
- adam' :: OneOfAdamDataTypes t => AdamConfig t -> Minimizer t
Documentation
type Minimizer a = forall m. MonadBuild m => [Variable a] -> [Tensor Value a] -> m ControlNode Source #
Arguments
| :: (MonadBuild m, GradientCompatible a) | |
| => Minimizer a | |
| -> Tensor v a | Loss. |
| -> [Variable a] | Parameters of the loss function. |
| -> m ControlNode |
Arguments
| :: GradientCompatible a | |
| => a | Learning rate. |
| -> Minimizer a |
Perform one step of the gradient descent algorithm.
type OneOfAdamDataTypes t = OneOf '[Complex Double, Complex Float, Int16, Int32, Int64, Int8, Word16, Word32, Word64, Word8, Double, Float] t Source #
data AdamConfig t Source #
Constructors
| AdamConfig | |
Fields
| |
Instances
| Fractional t => Default (AdamConfig t) Source # | |
Defined in TensorFlow.Minimize
Methods
def :: AdamConfig t
adam :: (OneOfAdamDataTypes t, Fractional t) => Minimizer t Source #
Perform one step of the Adam algorithm.
See https://arxiv.org/abs/1412.6980.
NOTE: Currently requires all Variables to have an initializedValue.
adam' :: OneOfAdamDataTypes t => AdamConfig t -> Minimizer t Source #