warn and ignore export during LoRA training, because the state dict exported during training is wrong

mrq 2025-05-20 23:38:10 -05:00
parent fee02f4153
commit e46d7ef2cb


@@ -250,6 +250,9 @@ def train(
 				last_save_step = engines.global_step
 			if is_global_leader():
-				engines.export(userdata={"symmap": get_phone_symmap()})
+				if cfg.lora is not None:
+					_logger.warning(f"Exporting LoRA during training not properly implemented, please use the export module explicitly.")
+				else:
+					engines.export(userdata={"symmap": get_phone_symmap()})
 
 			save_ckpt_every = cfg.trainer.save_frequency or cfg.evaluation.frequency
@@ -273,6 +276,10 @@ def train(
 				last_save_step = engines.global_step
 			if command in export_commands and is_global_leader():
-				engines.export(userdata={"symmap": get_phone_symmap()})
+				# to-do: actually export the state properly
+				if cfg.lora is not None:
+					_logger.warning(f"Exporting LoRA during training not properly implemented, please use the export module explicitly.")
+				else:
+					engines.export(userdata={"symmap": get_phone_symmap()})
 
 			if engines.global_step != last_eval_step:
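
For reference, a minimal sketch of what an explicit post-training LoRA export could look like, assuming the checkpoint's state dict namespaces adapter weights with a "lora_" substring in their keys; export_lora and the paths below are illustrative stand-ins, not this repo's actual export module API:

import torch

def export_lora(checkpoint_path: str, output_path: str) -> None:
	# load the full training checkpoint on CPU so no GPU memory is needed
	state_dict = torch.load(checkpoint_path, map_location="cpu")
	# some trainers nest the model weights under a "module" key (an assumption here)
	if isinstance(state_dict, dict) and "module" in state_dict:
		state_dict = state_dict["module"]
	# keep only the adapter tensors; dropping the frozen base weights and any
	# optimizer state yields a small file containing just the LoRA deltas
	lora_state = {k: v for k, v in state_dict.items() if "lora_" in k}
	torch.save(lora_state, output_path)

export_lora("ckpt/latest.pth", "lora.pth")

Exporting this way from a finalized checkpoint avoids the mid-training problem the commit warns about, since the adapter weights are read from a saved state dict rather than a live engine state.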