From e46d7ef2cb0aec4f0ba482d0d0af924d97473324 Mon Sep 17 00:00:00 2001 From: mrq Date: Tue, 20 May 2025 23:38:10 -0500 Subject: [PATCH] warn and skip export during LoRA training because the state dict exported during training is wrong --- vall_e/utils/trainer.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/vall_e/utils/trainer.py b/vall_e/utils/trainer.py index a563e29..46d9c8f 100755 --- a/vall_e/utils/trainer.py +++ b/vall_e/utils/trainer.py @@ -250,7 +250,10 @@ def train( last_save_step = engines.global_step if is_global_leader(): - engines.export(userdata={"symmap": get_phone_symmap()}) + if cfg.lora is not None: + _logger.warning(f"Exporting LoRA during training not properly implemented, please use the export module explicitly.") + else: + engines.export(userdata={"symmap": get_phone_symmap()}) save_ckpt_every = cfg.trainer.save_frequency or cfg.evaluation.frequency @@ -273,7 +276,11 @@ def train( last_save_step = engines.global_step if command in export_commands and is_global_leader(): - engines.export(userdata={"symmap": get_phone_symmap()}) + # to-do: actually export the state properly + if cfg.lora is not None: + _logger.warning(f"Exporting LoRA during training not properly implemented, please use the export module explicitly.") + else: + engines.export(userdata={"symmap": get_phone_symmap()}) if engines.global_step != last_eval_step: if engines.global_step % cfg.evaluation.frequency == 0 or command in ["eval"]: