Remove entmax dep

James Betker 2022-05-02 21:43:14 -06:00
parent 14617f8963
commit 29b2f36f55
3 changed files with 1 addition and 4 deletions

View File

@@ -6,6 +6,5 @@ inflect
 progressbar
 einops
 unidecode
-entmax
 scipy
 librosa

View File

@@ -21,7 +21,6 @@ setuptools.setup(
         'progressbar',
         'einops',
         'unidecode',
-        'entmax',
         'scipy',
         'librosa',
         'transformers',

View File

@@ -10,7 +10,6 @@ from collections import namedtuple
 from einops import rearrange, repeat, reduce
 from einops.layers.torch import Rearrange
-from entmax import entmax15
 from torch.utils.checkpoint import checkpoint

 DEFAULT_DIM_HEAD = 64
@@ -556,7 +555,7 @@ class Attention(nn.Module):
         self.sparse_topk = sparse_topk

         # entmax
-        self.attn_fn = entmax15 if use_entmax15 else F.softmax
+        self.attn_fn = F.softmax

         # add memory key / values
         self.num_mem_kv = num_mem_kv
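
For context on the functional change: before this commit, `attn_fn` could be `entmax15` (a sparse alternative to softmax from the entmax package, which zeroes out low-scoring attention weights) when `use_entmax15` was set; afterwards, attention weights always come from `F.softmax`. The two are drop-in replacements because both map logits to a probability distribution over the last dimension. Below is a minimal sketch of where `attn_fn` sits in a scaled dot-product attention step; the `attend` helper and the tensor shapes are illustrative, not the repo's exact code.

```python
import torch
import torch.nn.functional as F

# After this commit the attention function is fixed to softmax;
# previously it could be entmax15(dots, dim=-1) for sparse weights.
attn_fn = F.softmax

def attend(q, k, v, scale=None):
    # q, k, v: (batch, heads, seq, dim_head) -- illustrative shapes
    scale = scale or q.shape[-1] ** -0.5
    # similarity logits between queries and keys
    dots = torch.einsum('b h i d, b h j d -> b h i j', q, k) * scale
    # normalize logits into attention weights over the key dimension
    attn = attn_fn(dots, dim=-1)
    # weighted sum of values
    return torch.einsum('b h i j, b h j d -> b h i d', attn, v)

q = k = v = torch.randn(1, 8, 16, 64)
out = attend(q, k, v)  # shape: (1, 8, 16, 64)
```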