Remove entmax dep

This commit is contained in:
James Betker 2022-05-02 21:43:14 -06:00
parent 14617f8963
commit 29b2f36f55
3 changed files with 1 additions and 4 deletions

View File

@@ -6,6 +6,5 @@ inflect
progressbar
einops
unidecode
entmax
scipy
librosa

View File

@@ -21,7 +21,6 @@ setuptools.setup(
'progressbar',
'einops',
'unidecode',
'entmax',
'scipy',
'librosa',
'transformers',

View File

@@ -10,7 +10,6 @@ from collections import namedtuple
from einops import rearrange, repeat, reduce
from einops.layers.torch import Rearrange
from entmax import entmax15
from torch.utils.checkpoint import checkpoint
DEFAULT_DIM_HEAD = 64
@@ -556,7 +555,7 @@ class Attention(nn.Module):
self.sparse_topk = sparse_topk
# entmax
self.attn_fn = entmax15 if use_entmax15 else F.softmax
self.attn_fn = F.softmax
# add memory key / values
self.num_mem_kv = num_mem_kv