emergency fix for xformers (continue + shared)
parent 48feae37ff
commit 7ff1170a2e
@@ -1,19 +1,19 @@
 import math
 import torch
 from torch import einsum
-try:
-    import xformers.ops
-    import functorch
-    xformers._is_functorch_available = True
-    shared.xformers_available = True
-except:
-    print('Cannot find xformers, defaulting to split attention. Try setting --xformers in your webui-user file if you wish to install it.')
-    continue
 
 from ldm.util import default
 from einops import rearrange
 
 from modules import shared
 
+try:
+    import xformers.ops
+    import functorch
+    xformers._is_functorch_available = True
+    shared.xformers_available = True
+except Exception:
+    print('Cannot find xformers, defaulting to split attention. Try adding --xformers commandline argument to your webui-user file if you wish to install it.')
+
 # see https://github.com/basujindal/stable-diffusion/pull/117 for discussion
 def split_cross_attention_forward_v1(self, x, context=None, mask=None):
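
The commit title names the two bugs being fixed: a bare "continue" is only legal inside a for/while body, so at module scope it made the whole file fail to parse with a SyntaxError, and "shared.xformers_available" was assigned before "from modules import shared" had executed. A minimal sketch of the corrected optional-import pattern (names follow the diff; the printed message is shortened here for brevity):

    from modules import shared  # now imported before the probe that writes to it

    try:
        import xformers.ops  # optional dependency; may not be installed
        import functorch
        xformers._is_functorch_available = True
        shared.xformers_available = True
    except Exception:  # narrower than bare "except:", which also swallows SystemExit/KeyboardInterrupt
        # the stray "continue" is gone; outside a loop it is a SyntaxError
        print('Cannot find xformers, defaulting to split attention.')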