
Commit 74ae950

add constant lr scheduler.
1 parent 72c48db commit 74ae950

File tree

1 file changed (+22 −0)


lavis/common/optims.py

Lines changed: 22 additions & 0 deletions
@@ -94,6 +94,28 @@ def step(self, cur_epoch, cur_step):
         )
 
 
+@registry.register_lr_scheduler("constant_lr")
+class ConstantLRScheduler:
+    def __init__(self, optimizer, init_lr, warmup_start_lr=-1, warmup_steps=0, **kwargs):
+        self.optimizer = optimizer
+        self.lr = init_lr
+        self.warmup_start_lr = warmup_start_lr if warmup_start_lr >= 0 else init_lr
+        self.warmup_steps = warmup_steps
+
+    def step(self, cur_epoch, cur_step):
+        if cur_epoch == 0:
+            warmup_lr_schedule(
+                step=cur_step,
+                optimizer=self.optimizer,
+                max_step=self.warmup_steps,
+                init_lr=self.warmup_start_lr,
+                max_lr=self.lr,
+            )
+        else:
+            for param_group in self.optimizer.param_groups:
+                param_group["lr"] = self.lr
+
+
 def cosine_lr_schedule(optimizer, epoch, max_epoch, init_lr, min_lr):
     """Decay the learning rate"""
     lr = (init_lr - min_lr) * 0.5 * (
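
For context, a minimal usage sketch of the new scheduler (not part of the commit): it instantiates ConstantLRScheduler directly from lavis/common/optims.py and drives it with a hand-written loop. The toy Linear model, the AdamW optimizer, and the step counts are illustrative assumptions; in LAVIS the runner would normally construct the scheduler through the registry under the "constant_lr" key that the decorator registers, rather than by hand.

    # Minimal usage sketch (not part of the commit). The toy model, optimizer,
    # and loop sizes are illustrative assumptions; ConstantLRScheduler and its
    # step(cur_epoch, cur_step) signature come from the diff above.
    import torch

    from lavis.common.optims import ConstantLRScheduler

    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)

    scheduler = ConstantLRScheduler(
        optimizer,
        init_lr=1e-4,          # constant LR after warmup
        warmup_start_lr=1e-6,  # starting LR for the epoch-0 warmup
        warmup_steps=100,      # warmup length in steps
    )

    steps_per_epoch = 100
    for epoch in range(3):
        for step in range(steps_per_epoch):
            # Epoch 0: warmup_lr_schedule (defined earlier in optims.py) ramps
            # the LR from warmup_start_lr toward init_lr over warmup_steps.
            # Later epochs: the LR is pinned at init_lr on every step.
            scheduler.step(cur_epoch=epoch, cur_step=step)
            # ... forward / backward / optimizer.step() would go here ...

Note that warmup only runs during epoch 0; from epoch 1 onward, step() rewrites every param group's LR to init_lr on each call, so any external LR change is overwritten.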
