Don't return attention weights from the decoder (they are no longer needed)

This commit is contained in:
buaahsh 2022-12-06 18:31:17 +08:00
parent be167b3dda
commit 2005ab1f26

View File

@@ -462,7 +462,7 @@ class Decoder(nn.Module):
         return x, {
             "inner_states": inner_states,
             "l_aux": l_aux,
-            "attn": [layer_attn.mean(dim=0)],
+            "attn": None,
         }

     def output_layer(self, features):