today I learned adamw_zero actually negates ANY LR schemes
parent cb273b8428
commit c3b43d2429
@@ -1394,7 +1394,8 @@ def save_training_settings( **kwargs ):
     if settings['gpus'] > get_device_count():
         settings['gpus'] = get_device_count()
 
-    settings['optimizer'] = 'adamw' if settings['gpus'] == 1 else 'adamw_zero'
+    # what an utter mistake this was
+    # settings['optimizer'] = 'adamw' if settings['gpus'] == 1 else 'adamw_zero'
 
     if 'learning_rate_scheme' not in settings or settings['learning_rate_scheme'] not in LEARNING_RATE_SCHEMES:
         settings['learning_rate_scheme'] = "Multistep"
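For context, a minimal sketch of what this change does in practice (illustrative only, not the repository's actual function; only settings, get_device_count, and the optimizer names come from the diff above): the removed line silently forced 'adamw_zero' on any multi-GPU run, which per the commit message meant the selected learning-rate scheme never took effect, while the new code leaves the configured optimizer untouched.

# Illustrative sketch: old vs. new handling of settings['optimizer'].
# get_device_count() is stubbed here; in the real code it reports visible GPUs.
def get_device_count():
    return 2  # pretend two GPUs are available

settings = {'gpus': 2, 'optimizer': 'adamw', 'learning_rate_scheme': 'Multistep'}

# Old behavior (the removed line): any multi-GPU run is switched to adamw_zero,
# which, per the commit message, negates whatever LR scheme was chosen.
old = dict(settings)
old['optimizer'] = 'adamw' if old['gpus'] == 1 else 'adamw_zero'
print(old['optimizer'])  # -> adamw_zero

# New behavior: the override is commented out, so the optimizer the caller
# configured is preserved and the LR scheme stays meaningful.
new = dict(settings)
print(new['optimizer'])  # -> adamw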