From b2eca271a865feed65062bed2da23eb8d31cdbab Mon Sep 17 00:00:00 2001
From: mrq
Date: Wed, 13 Nov 2024 10:35:44 -0600
Subject: [PATCH] ugh

---
 vall_e/utils/trainer.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/vall_e/utils/trainer.py b/vall_e/utils/trainer.py
index 7f2862b..d493a4a 100755
--- a/vall_e/utils/trainer.py
+++ b/vall_e/utils/trainer.py
@@ -105,8 +105,9 @@ def _make_infinite_epochs(dl):
 	while True:
 		if dl.dataset.index() == 0:
 			_logger.info("New epoch starts.")
+		yield from tqdm(dl, "Epoch progress", dynamic_ncols=True, disable=not is_global_leader())
 		# this number may jump from the dataloader sampling before the actual training step happens
-		yield from tqdm(dl, "Epoch progress", dynamic_ncols=True, disable=not is_global_leader(), initial=dl.dataset.index(), total=len(dl.dataset))
+		#yield from tqdm(dl, "Epoch progress", dynamic_ncols=True, disable=not is_global_leader(), initial=dl.dataset.index(), total=len(dl.dataset))
 
 
 @local_leader_only(default=None)