forked from mrq/DL-Art-School
We don't need that encoder either..
parent 56c4a00e71
commit 0e5a3f4712
@@ -20,6 +20,7 @@ class PreprocessedMelDataset(torch.utils.data.Dataset):
        if os.path.exists(cache_path):
            self.paths = torch.load(cache_path)
        else:
            print("Building cache..")
            path = Path(path)
            self.paths = [str(p) for p in path.rglob("*.npz")]
            torch.save(self.paths, cache_path)
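The hunk above is a simple disk cache for the dataset's file list: the recursive glob over *.npz files runs once, and later runs reload the saved list instead of rescanning the directory tree. A minimal standalone sketch of the same pattern, with a hypothetical function name and arguments:

    import os
    from pathlib import Path

    import torch

    def load_or_build_path_cache(path, cache_path):
        # Reuse the previously saved file list if it exists on disk.
        if os.path.exists(cache_path):
            return torch.load(cache_path)
        # Otherwise walk the dataset directory once and cache the result,
        # so subsequent runs skip the slow recursive glob.
        print("Building cache..")
        paths = [str(p) for p in Path(path).rglob("*.npz")]
        torch.save(paths, cache_path)
        return paths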
@@ -216,10 +216,6 @@ class TransformerDiffusionWithConditioningEncoder(nn.Module):
         self.internal_step = 0
         self.diff = TransformerDiffusion(**kwargs)
         self.conditioning_encoder = ConditioningEncoder(256, kwargs['model_channels'])
-        self.encoder = UpperEncoder(256, 1024, 256).eval()
-        for p in self.encoder.parameters():
-            p.DO_NOT_TRAIN = True
-            p.requires_grad = False

     def forward(self, x, timesteps, true_cheater, conditioning_input=None, disable_diversity=False, conditioning_free=False):
         cond = self.conditioning_encoder(true_cheater)
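Per the commit message, the frozen UpperEncoder is dropped from TransformerDiffusionWithConditioningEncoder; only the trainable ConditioningEncoder remains. The removed lines used a standard freeze-a-submodule pattern: switch the module to eval mode and disable gradients on its parameters (DO_NOT_TRAIN is a custom flag, presumably read by DL-Art-School's training loop when it builds optimizer parameter groups). A minimal sketch of that pattern, with nn.Linear as a hypothetical stand-in for UpperEncoder:

    import torch.nn as nn

    class ModelWithFrozenEncoder(nn.Module):
        def __init__(self):
            super().__init__()
            # Hypothetical stand-in for UpperEncoder(256, 1024, 256).
            self.encoder = nn.Linear(256, 1024).eval()
            for p in self.encoder.parameters():
                # Custom attribute; an optimizer-building step can skip
                # any parameter carrying this flag.
                p.DO_NOT_TRAIN = True
                # Standard PyTorch switch: no gradients flow into p.
                p.requires_grad = False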