From 22904a86396867f4d4bffbd49ca3062903ad51d3 Mon Sep 17 00:00:00 2001
From: mrq
Date: Thu, 24 Aug 2023 10:25:33 -0500
Subject: [PATCH] more oversights fixed because I've been using a cached
 dataloader forever now and didn't catch these problems

---
 vall_e/data.py    | 4 ++--
 vall_e/emb/qnt.py | 2 ++
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/vall_e/data.py b/vall_e/data.py
index 46ca44a..629858d 100755
--- a/vall_e/data.py
+++ b/vall_e/data.py
@@ -138,8 +138,8 @@ class Dataset(_Dataset):
 
 		self.paths = paths
 		self.phone_symmap = phone_symmap or self._get_phone_symmap()
-		self.spkr_symmap = spkr_symmap or self._get_spkr_symmap()
-		self.task_symmap = get_task_symmap or self._get_task_symmap()
+		self.spkr_symmap = self._get_spkr_symmap()
+		self.task_symmap = self._get_task_symmap()
 		self.training = training
 
 		# assert len(self.phone_symmap) < 256, "Unique token count should be [0,255] to fit within uint8"
diff --git a/vall_e/emb/qnt.py b/vall_e/emb/qnt.py
index 14f0fe8..25bf618 100755
--- a/vall_e/emb/qnt.py
+++ b/vall_e/emb/qnt.py
@@ -26,6 +26,7 @@ def _load_encodec_model(device="cuda", levels=cfg.models.max_levels):
 	assert cfg.sample_rate == 24_000
 
 	# too lazy to un-if ladder this shit
+	bandwidth_id = 6.0
 	if levels == 2:
 		bandwidth_id = 1.5
 	elif levels == 4:
@@ -50,6 +51,7 @@ def _load_vocos_model(device="cuda", levels=cfg.models.max_levels):
 	model = model.to(device)
 
 	# too lazy to un-if ladder this shit
+	bandwidth_id = 2
 	if levels == 2:
 		bandwidth_id = 0
 	elif levels == 4:
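
Note (not part of the patch): a minimal sketch of the two oversights being fixed, assuming from the removed line that get_task_symmap is a module-level function in vall_e/data.py. The symmap contents and the levels == 4 bandwidth below are illustrative placeholders, not values taken from the repo.

# sketch.py -- illustrative only, not code from the repo

def get_task_symmap():
	# module-level helper of the same name as the one referenced (uncalled)
	# in the removed line; contents here are placeholders
	return {"<tts>": 0}

def _get_task_symmap():
	# stand-in for Dataset._get_task_symmap(); contents are placeholders
	return {"<tts>": 0, "<ns>": 1}

# Before the fix: a bare function object is always truthy, so the `or`
# short-circuits and task_symmap ends up being a function, not a dict.
task_symmap = get_task_symmap or _get_task_symmap()
print(callable(task_symmap))   # True -- a function, not a symmap

# After the fix: the mapping is always built explicitly.
task_symmap = _get_task_symmap()
print(task_symmap["<tts>"])    # 0

# The qnt.py hunks follow the same idea: seed bandwidth_id with a default so
# a `levels` value outside the if-ladder can't leave it unbound.
levels = 8            # e.g. a max_levels value the ladder doesn't cover
bandwidth_id = 6.0    # default added by the patch
if levels == 2:
	bandwidth_id = 1.5
elif levels == 4:
	bandwidth_id = 3.0  # illustrative; this branch is truncated in the hunk
print(bandwidth_id)   # 6.0 instead of a NameError later on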