public struct OptimizerWeightStepState
State for a single step of a single weight inside an optimizer.
-
Hyperparameters.
Declaration
public let globals: [Tensor<Float>]
-
Temporary values (can only be assigned once).
Declaration
public var locals: [Tensor<Float>]
-
The gradient of the loss function with respect to this weight.
Declaration
public var grad: Tensor<Float>
-
The weight being optimized.
Declaration
public let weight: Tensor<Float>
-
The final output of the optimizer; it should only be set once. A value of nil means the weight will not be touched. The step is applied to the actual weight at the end:
weight += step
Declaration
public var step: Tensor<Float>?
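For illustration, a plain-SGD step callback could fill in step from grad. This is a minimal sketch, not the library's own implementation; the sgdStep name and the hard-coded learningRate are assumptions, and a real optimizer would read the learning rate through the GlobalAccessor subscript instead.

// Minimal sketch of a plain-SGD step callback.
// `learningRate` is a hypothetical hard-coded hyperparameter.
func sgdStep(_ state: inout OptimizerWeightStepState) {
    let learningRate: Float = 0.01
    // Leaving `step` as nil would leave the weight untouched.
    // Setting it here yields the final update: weight += -learningRate * grad.
    state.step = -learningRate * state.grad
}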
-
Accesses one of the temporary local values through its accessor.
Declaration
public subscript(local: LocalAccessor) -> Tensor<Float> { get set }
-
Reads one of the hyperparameter values through its accessor.
Declaration
public subscript(global: GlobalAccessor) -> Tensor<Float> { get }
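As an illustration of the two accessor subscripts, the sketch below reads a hyperparameter through the global subscript and stores a temporary value through the local subscript. It is hedged: the scaledStep name and the lrAccessor / bufferAccessor parameters are hypothetical, and real accessor values would come from the optimizer's own configuration.

// Hedged sketch: `lrAccessor` and `bufferAccessor` are assumed to be
// supplied by the optimizer's configuration (not shown here).
func scaledStep(_ state: inout OptimizerWeightStepState,
                lrAccessor: GlobalAccessor,
                bufferAccessor: LocalAccessor) {
    let lr = state[lrAccessor]            // read-only hyperparameter
    state[bufferAccessor] = state.grad    // temporary value, assigned once
    state.step = -lr * state.grad         // scaled gradient-descent step
}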