tts9: fix position embeddings snafu

James Betker 2022-03-22 11:41:32 -06:00
parent 536511fc4b
commit 927731f3b4


@@ -219,7 +219,7 @@ class DiffusionTts(nn.Module):
attn_dropout=dropout,
use_rmsnorm=True,
ff_glu=True,
- rotary_emb_dim=True,
+ rotary_pos_emb=True,
)
))
self.latent_converter = nn.Conv1d(in_latent_channels, conditioning_dim, 1)
@@ -238,7 +238,7 @@ class DiffusionTts(nn.Module):
attn_dropout=dropout,
use_rmsnorm=True,
ff_glu=True,
- rotary_emb_dim=True,
+ rotary_pos_emb=True,
)
))
else:
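
For context, a minimal sketch of the corrected configuration, assuming the Encoder here is the one from the x-transformers library: in that API, `rotary_pos_emb=True` is the switch that actually enables rotary position embeddings, while `rotary_emb_dim` only overrides their size, so passing `rotary_emb_dim=True` alone would have left rotary position encoding disabled. The dimensions below are hypothetical placeholders, not the repo's real values.

```python
# Minimal sketch, assuming the x-transformers Encoder API; dim/depth/heads
# are illustrative values, not the actual DiffusionTts configuration.
import torch
from x_transformers import Encoder

encoder = Encoder(
    dim=512,              # hypothetical conditioning dimension
    depth=4,
    heads=8,
    attn_dropout=0.1,
    use_rmsnorm=True,
    ff_glu=True,
    rotary_pos_emb=True,  # the corrected kwarg from this commit
)

x = torch.randn(1, 100, 512)   # (batch, sequence, dim)
out = encoder(x)               # positions now encoded via rotary embeddings
print(out.shape)               # torch.Size([1, 100, 512])
```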