From df27b98730553891404b22b8dc75563a1738aac1 Mon Sep 17 00:00:00 2001 From: James Betker Date: Mon, 18 Jul 2022 17:17:04 -0600 Subject: [PATCH] DDP doesn't like dropout on checkpointed values --- codes/models/audio/music/transformer_diffusion13.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codes/models/audio/music/transformer_diffusion13.py b/codes/models/audio/music/transformer_diffusion13.py index 89f09d6c..0ed685b8 100644 --- a/codes/models/audio/music/transformer_diffusion13.py +++ b/codes/models/audio/music/transformer_diffusion13.py @@ -23,7 +23,7 @@ def is_sequence(t): class SubBlock(nn.Module): def __init__(self, inp_dim, contraction_dim, blk_dim, heads, dropout): super().__init__() - self.dropout = nn.Dropout(p=dropout, inplace=True) + self.dropout = nn.Dropout(p=dropout) self.blk_emb_proj = nn.Conv1d(blk_dim, inp_dim, 1) self.attn = AttentionBlock(inp_dim, out_channels=contraction_dim, num_heads=heads) self.attnorm = nn.GroupNorm(8, contraction_dim)