From ce929a6b3fa9f216d6702365109aafb1f17c5a5e Mon Sep 17 00:00:00 2001
From: James Betker
Date: Fri, 21 Jan 2022 23:13:24 -0700
Subject: [PATCH] Allow grad scaler to be enabled even in fp32 mode

---
 codes/trainer/steps.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/codes/trainer/steps.py b/codes/trainer/steps.py
index 3ac97f25..abd8c6c8 100644
--- a/codes/trainer/steps.py
+++ b/codes/trainer/steps.py
@@ -25,7 +25,7 @@ class ConfigurableStep(Module):
         self.gen_outputs = opt_step['generator_outputs']
         self.loss_accumulator = LossAccumulator(buffer_sz=opt_get(opt_step, ['loss_log_buffer'], 50))
         self.optimizers = None
-        self.scaler = GradScaler(enabled=self.opt['fp16'])
+        self.scaler = GradScaler(enabled=self.opt['fp16'] or opt_get(self.opt, ['grad_scaler_enabled'], False))
         self.grads_generated = False
         self.min_total_loss = opt_step['min_total_loss'] if 'min_total_loss' in opt_step.keys() else -999999999
         self.clip_grad_eps = opt_get(opt_step, ['clip_grad_eps'], None)
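
A minimal sketch of how the patched condition behaves, not taken from this repository: `opt` stands in for the trainer's options dict, `grad_scaler_enabled` is the key added by this patch, and `opt_get` is approximated with `dict.get`. The surrounding model/optimizer/loop code is illustrative only and assumes a CUDA device, since torch.cuda.amp.GradScaler disables itself when CUDA is unavailable.

    import torch
    from torch.cuda.amp import GradScaler

    # Options as the trainer might see them: fp32 training, but the scaler forced on.
    opt = {'fp16': False, 'grad_scaler_enabled': True}

    # Same condition as the patched line: the scaler is active when either flag is set.
    scaler = GradScaler(enabled=opt['fp16'] or opt.get('grad_scaler_enabled', False))

    model = torch.nn.Linear(4, 1).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    x = torch.randn(8, 4, device='cuda')
    target = torch.randn(8, 1, device='cuda')

    loss = torch.nn.functional.mse_loss(model(x), target)
    scaler.scale(loss).backward()   # loss is scaled even though autocast/fp16 is off
    scaler.step(optimizer)          # unscales grads; skips the step if inf/nan is found
    scaler.update()

When both flags are False, GradScaler(enabled=False) makes scale/step/update no-ops, so the pre-patch fp32 behavior is unchanged.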