fix clip grad norm with scaler

James Betker 2022-03-13 16:28:23 -06:00
parent 22c67ce8d3
commit e045fb0ad7


@@ -304,6 +304,8 @@ class ConfigurableStep(Module):
            return
        self.grads_generated = False
        for opt in self.optimizers:
            self.scaler.unscale_(opt)
            # Optimizers can be opted out in the early stages of training.
            after = opt._config['after'] if 'after' in opt._config.keys() else 0
            after_network = self.opt['networks'][opt._config['network']]['after'] if 'after' in self.opt['networks'][opt._config['network']].keys() else 0
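For context, the fix follows the ordering that torch.cuda.amp requires: clip_grad_norm_ operates on whatever is currently in the .grad tensors, so if they are still multiplied by the loss scale, the clipping threshold is compared against inflated magnitudes and effectively does nothing useful. GradScaler.unscale_(opt) divides the gradients in place before the clip. A minimal sketch of that pattern outside this codebase (the model, loader, and hyperparameters are illustrative placeholders, not taken from the repo):

    import torch
    from torch import nn
    from torch.cuda.amp import GradScaler, autocast

    model = nn.Linear(128, 10).cuda()
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)
    scaler = GradScaler()

    for inputs, targets in loader:  # placeholder DataLoader
        optimizer.zero_grad()
        with autocast():
            loss = nn.functional.cross_entropy(model(inputs), targets)
        # backward() on the scaled loss produces scaled gradients.
        scaler.scale(loss).backward()
        # Unscale in place before clipping so max_norm is compared
        # against true gradient magnitudes, not scaled ones.
        scaler.unscale_(optimizer)
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)
        # step() detects that unscale_ was already called and will not
        # unscale twice; it still skips the update on inf/NaN gradients.
        scaler.step(optimizer)
        scaler.update()

Calling unscale_ once per optimizer per iteration is safe: scaler.step() will not unscale again, and the inf/NaN check that makes AMP robust is preserved.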