fix an off-by-one error with embedding at the start of the sentence
parent 54a097a818
commit 90401d96a6
@@ -401,7 +401,7 @@ class EmbeddingsWithFixes(torch.nn.Module):
                 for offset, word in fixes:
                     emb = self.embeddings.word_embeddings[word]
                     emb_len = min(tensor.shape[0]-offset, emb.shape[0])
-                    tensor[offset:offset+emb_len] = self.embeddings.word_embeddings[word][0:emb_len]
+                    tensor[offset+1:offset+1+emb_len] = self.embeddings.word_embeddings[word][0:emb_len]
 
         return inputs_embeds
 
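For context, a minimal sketch (not part of the commit) of why the write must start at offset+1: row 0 of the embedded prompt holds the start-of-text token, so an embedding placed at the start of the sentence (offset == 0) would otherwise overwrite it. Variable names mirror the diff; the concrete shapes here are hypothetical.

import torch

seq_len, emb_dim = 8, 4
tensor = torch.zeros(seq_len, emb_dim)   # embedded prompt; row 0 is the start-of-text token
tensor[0] = 1.0                          # stand-in for the BOS embedding

emb = torch.full((3, emb_dim), 7.0)      # hypothetical 3-vector custom embedding
offset = 0                               # embedding at the start of the sentence

# before the fix: tensor[offset:offset+emb_len] = emb[0:emb_len]
# clobbers row 0 (the start-of-text token) when offset == 0

# after the fix: shift the destination window by one position
emb_len = min(tensor.shape[0]-offset, emb.shape[0])
tensor[offset+1:offset+1+emb_len] = emb[0:emb_len]

assert torch.all(tensor[0] == 1.0)       # BOS row survives the insertion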