forked from mrq/DL-Art-School
Allow multi_step_lr_scheduler to load a new LR schedule when restoring state
This commit is contained in:
parent
e37726f302
commit
6b45b35447
|
@ -54,6 +54,12 @@ class MultiStepLR_Restart(_LRScheduler):
|
||||||
for group in self.optimizer.param_groups
|
for group in self.optimizer.param_groups
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Allow this scheduler to use newly appointed milestones partially through a training run.
def load_state_dict(self, s):
    """Restore scheduler state while keeping the currently configured milestones.

    The base ``_LRScheduler.load_state_dict`` would overwrite
    ``self.milestones`` with whatever was saved in the checkpoint; by
    stashing the attribute first and writing it back afterwards, a resumed
    training run can follow a newly specified LR schedule instead of the
    one stored in ``s``.

    :param s: state dict previously produced by ``state_dict()``.
    """
    preserved_milestones = self.milestones
    super(MultiStepLR_Restart, self).load_state_dict(s)
    self.milestones = preserved_milestones
|
||||||
|
|
||||||
|
|
||||||
class CosineAnnealingLR_Restart(_LRScheduler):
|
class CosineAnnealingLR_Restart(_LRScheduler):
|
||||||
def __init__(self, optimizer, T_period, restarts=None, weights=None, eta_min=0, last_epoch=-1):
|
def __init__(self, optimizer, T_period, restarts=None, weights=None, eta_min=0, last_epoch=-1):
|
||||||
|
|
Loading…
Reference in New Issue
Block a user