# pytorch
from .optimizer import Optimizer, ParamsT
2
class Adagrad(Optimizer):
    """Type stub for the Adagrad optimizer.

    Declares the constructor signature only; the implementation lives in
    the corresponding ``.py`` module. Defaults are elided with ``...`` per
    stub-file convention (the runtime defaults are defined in the
    implementation, not here).
    """

    def __init__(
        self,
        params: ParamsT,
        lr: float = ...,
        lr_decay: float = ...,
        weight_decay: float = ...,
        initial_accumulator_value: float = ...,
        eps: float = ...,
    ) -> None: ...