Merge pull request #8 from buaahsh/main
don't need attn weight in decoder
commit 6d62bbbf67
@@ -462,7 +462,7 @@ class Decoder(nn.Module):
         return x, {
             "inner_states": inner_states,
             "l_aux": l_aux,
-            "attn": [layer_attn.mean(dim=0)],
+            "attn": None,
         }
 
     def output_layer(self, features):
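For context, a minimal sketch of the changed return path. This is not the actual Decoder from the repository: the attention module, dimensions, and the empty l_aux list are illustrative stand-ins. It shows a forward pass that skips materializing attention weights and returns "attn": None, matching the intent of the commit:

import torch
import torch.nn as nn


class TinyDecoder(nn.Module):
    """Toy stand-in for the real Decoder; only the return structure matters."""

    def __init__(self, embed_dim: int = 16, num_heads: int = 4):
        super().__init__()
        self.self_attn = nn.MultiheadAttention(embed_dim, num_heads, batch_first=True)

    def forward(self, x: torch.Tensor):
        inner_states = [x]
        # need_weights=False lets the attention call skip building the full
        # attention matrix, which is the benefit of not returning it to callers.
        x, _ = self.self_attn(x, x, x, need_weights=False)
        inner_states.append(x)
        return x, {
            "inner_states": inner_states,
            "l_aux": [],    # aux losses in the real model; empty in this sketch
            "attn": None,   # attention weights are no longer collected or averaged
        }


if __name__ == "__main__":
    out, extra = TinyDecoder()(torch.randn(2, 5, 16))
    print(out.shape, extra["attn"])  # torch.Size([2, 5, 16]) None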