From b1a72bc7e292246e70ec8ebebd3a9ca42dffff03 Mon Sep 17 00:00:00 2001 From: "Vivek K. Vasishtha" Date: Sat, 3 Jun 2023 21:54:27 +0530 Subject: [PATCH] torch.cuda.is_available() check for SdOptimizationXformers --- modules/sd_hijack_optimizations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py index 80e48a42..c2660177 100644 --- a/modules/sd_hijack_optimizations.py +++ b/modules/sd_hijack_optimizations.py @@ -48,7 +48,7 @@ class SdOptimizationXformers(SdOptimization): priority = 100 def is_available(self): - return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)) + return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)) def apply(self): ldm.modules.attention.CrossAttention.forward = xformers_attention_forward