Fix crash when grad_norm is None: between training sessions engine.get_global_grad_norm() can return None even though it previously returned a value
This commit is contained in:
parent
c1fcd889d5
commit
c2a436d368
|
@ -493,10 +493,11 @@ class Engines(dict[str, Engine]):
|
|||
total_elapsed_time += elapsed_time
|
||||
grad_norm = engine.get_global_grad_norm()
|
||||
loss_scale = 1
|
||||
if hasattr(engine.optimizer, "loss_scale"):
|
||||
if hasattr(engine.optimizer, "loss_scale") and engine.optimizer.loss_scale is not None:
|
||||
loss_scale = engine.optimizer.loss_scale
|
||||
|
||||
grad_norm /= loss_scale
|
||||
if grad_norm is not None:
|
||||
grad_norm /= loss_scale
|
||||
|
||||
stats.update(
|
||||
flatten_dict(
|
||||
|
|
Loading…
Reference in New Issue
Block a user