emergency fix for xformers (continue + shared)

AUTOMATIC 2022-10-08 16:33:39 +03:00
parent 48feae37ff
commit 7ff1170a2e


@@ -1,19 +1,19 @@
 import math
 import torch
 from torch import einsum
-try:
-    import xformers.ops
-    import functorch
-    xformers._is_functorch_available = True
-    shared.xformers_available = True
-except:
-    print('Cannot find xformers, defaulting to split attention. Try setting --xformers in your webui-user file if you wish to install it.')
-    continue
 
 from ldm.util import default
 from einops import rearrange
 
 from modules import shared
 
+try:
+    import xformers.ops
+    import functorch
+    xformers._is_functorch_available = True
+    shared.xformers_available = True
+except Exception:
+    print('Cannot find xformers, defaulting to split attention. Try adding --xformers commandline argument to your webui-user file if you wish to install it.')
+
 # see https://github.com/basujindal/stable-diffusion/pull/117 for discussion
 def split_cross_attention_forward_v1(self, x, context=None, mask=None):
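Why this is an emergency fix: the removed block had the two fatal bugs named in the commit title. `continue` is only valid inside a loop, so at module scope it is a SyntaxError that prevents the file from importing at all; and `shared.xformers_available = True` ran before `from modules import shared`, so the resulting NameError was swallowed by the bare `except:` and xformers was reported missing even when installed. The fix moves the try block after the `shared` import, drops the `continue`, and narrows the handler to `except Exception:`. Below is a minimal, self-contained sketch of the guarded optional-import pattern the fix lands on; `HAS_XFORMERS` is a hypothetical stand-in for `shared.xformers_available`, not a name from the codebase.

HAS_XFORMERS = False  # define the flag before the guarded import runs

try:
    import xformers.ops  # optional dependency; absent unless the user installed it
    HAS_XFORMERS = True
except Exception:
    # Catching Exception mirrors the commit; ImportError alone would be narrower.
    # Crucially, no 'continue' here: at module scope it is a SyntaxError.
    print('Cannot find xformers, defaulting to split attention.')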