"""Learning rate decay functions."""

import math

from megatron import print_rank_0


class AnnealingLR(object):
    """Anneals the learning rate."""

    def __init__(
        self,
        optimizer,
        start_lr,
        warmup_iter,
        total_iters,
        decay_style,
        last_iter,
        min_lr=0.0,
        use_checkpoint_lr_scheduler=True,
        override_lr_scheduler=False,
        use_mup=False,
    ):
        # Class values.
        self.optimizer = optimizer
        self.start_lr = start_lr
        self.min_lr = min_lr
        self.warmup_iter = warmup_iter
        self.num_iters = last_iter
        self.end_iter = total_iters
        assert self.end_iter > 0
        self.decay_style = decay_style
        self.override_lr_scheduler = override_lr_scheduler
        self.use_checkpoint_lr_scheduler = use_checkpoint_lr_scheduler
        self.use_mup = use_mup
        if self.override_lr_scheduler:
            assert not self.use_checkpoint_lr_scheduler, (
                "both override and use-checkpoint are set."
            )
        # Set the learning rate for the starting iteration.
        self.step(self.num_iters)

        print_rank_0("> learning rate decay style: {}".format(self.decay_style))

    def get_lr(self):
        """Learning rate decay functions from:
        https://openreview.net/pdf?id=BJYwwY9ll pg. 4"""

        num_iters_ = self.num_iters
        # Warmup: ramp linearly from 0 to start_lr over warmup_iter steps.
        if self.warmup_iter > 0 and self.num_iters <= self.warmup_iter:
            return float(self.start_lr) * num_iters_ / self.warmup_iter

        num_iters_ = num_iters_ - self.warmup_iter
        if self.decay_style == "linear":
            end_iter_ = self.end_iter - self.warmup_iter
            lr = self.start_lr * (end_iter_ - num_iters_) / end_iter_
        elif self.decay_style == "cosine":
            end_iter_ = self.end_iter - self.warmup_iter
            lr = self.min_lr + (
                (self.start_lr - self.min_lr)
                / 2.0
                * (math.cos(math.pi * num_iters_ / end_iter_) + 1)
            )
        elif self.decay_style == "exponential":
            # 0.693 ~ ln(2), so the learning rate halves by end_iter.
            end_iter_ = self.end_iter - self.warmup_iter
            lr = self.start_lr * math.exp(-0.693 * num_iters_ / end_iter_)
        else:
            lr = self.start_lr
        return max(lr, self.min_lr)
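
    # For reference, with t = num_iters - warmup_iter and T = end_iter - warmup_iter,
    # the branches above compute (before the final clamp at min_lr):
    #   linear:      lr = start_lr * (T - t) / T
    #   cosine:      lr = min_lr + (start_lr - min_lr) / 2 * (cos(pi * t / T) + 1)
    #   exponential: lr = start_lr * exp(-0.693 * t / T)    # 0.693 ~ ln 2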

    def step(self, step_num=None):
        """Set lr for all parameter groups."""
        if step_num is None:
            step_num = self.num_iters + 1
        self.num_iters = step_num
        new_lr = self.get_lr()
        for group in self.optimizer.param_groups:
            if self.use_mup and "width_mult" in group:
                # muP: scale the learning rate by the group's width multiplier.
                group["lr"] = new_lr / group["width_mult"]
            else:
                group["lr"] = new_lr

    def state_dict(self):
        state_dict = {
            "start_lr": self.start_lr,
            "warmup_iter": self.warmup_iter,
            "num_iters": self.num_iters,
            "decay_style": self.decay_style,
            "end_iter": self.end_iter,
            "min_lr": self.min_lr,
        }
        return state_dict

    def _check_and_set(self, cls_value, sd_value, name):
        """Auxiliary function for checking the values in the checkpoint and
        setting them."""
        if self.override_lr_scheduler:
            print_rank_0(" > overriding {} value to {}".format(name, cls_value))
            return cls_value

        if not self.use_checkpoint_lr_scheduler:
            assert cls_value == sd_value, (
                "AnnealingLR: class input value "
                "and checkpoint values for {} do not match".format(name)
            )
        print_rank_0(" > using checkpoint value {} for {}".format(sd_value, name))
        return sd_value

    def load_state_dict(self, sd):
        self.start_lr = self._check_and_set(
            self.start_lr, sd["start_lr"], "learning rate"
        )
        self.min_lr = self._check_and_set(
            self.min_lr, sd["min_lr"], "minimum learning rate"
        )
        self.warmup_iter = self._check_and_set(
            self.warmup_iter, sd["warmup_iter"], "warmup iterations"
        )
        self.end_iter = self._check_and_set(
            self.end_iter, sd["end_iter"], "total number of iterations"
        )
        self.decay_style = self._check_and_set(
            self.decay_style, sd["decay_style"], "decay style"
        )

        self.num_iters = sd["num_iters"]
        self.step(self.num_iters)
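

# Minimal usage sketch (illustrative, not part of this module): how AnnealingLR
# might be driven from a training loop. The torch.optim.SGD optimizer and the
# hyperparameter values are assumptions for demonstration; print_rank_0 assumes
# a megatron environment.
#
#     import torch
#
#     model = torch.nn.Linear(16, 16)
#     optimizer = torch.optim.SGD(model.parameters(), lr=0.0)
#     scheduler = AnnealingLR(
#         optimizer,
#         start_lr=1e-3,
#         warmup_iter=100,
#         total_iters=1000,
#         decay_style="cosine",
#         last_iter=0,
#         min_lr=1e-5,
#     )
#     for _ in range(1000):
#         ...               # forward, backward, optimizer.step()
#         scheduler.step()  # advance num_iters by one and refresh group lrs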