Fix crash when grad_norm is None: between training sessions, engine.get_global_grad_norm() can return None (even though it previously returned a value), so guard the loss-scale division.

This commit is contained in:
mrq 2024-06-02 08:29:27 -05:00
parent c1fcd889d5
commit c2a436d368

View File

@@ -493,9 +493,10 @@ class Engines(dict[str, Engine]):
 			total_elapsed_time += elapsed_time
 			grad_norm = engine.get_global_grad_norm()
 			loss_scale = 1
-			if hasattr(engine.optimizer, "loss_scale"):
+			if hasattr(engine.optimizer, "loss_scale") and engine.optimizer.loss_scale is not None:
 				loss_scale = engine.optimizer.loss_scale
-			grad_norm /= loss_scale
+			if grad_norm is not None:
+				grad_norm /= loss_scale
 			stats.update(