From b1a72bc7e292246e70ec8ebebd3a9ca42dffff03 Mon Sep 17 00:00:00 2001
From: "Vivek K. Vasishtha"
Date: Sat, 3 Jun 2023 21:54:27 +0530
Subject: [PATCH 1/2] torch.cuda.is_available() check for SdOptimizationXformers

---
 modules/sd_hijack_optimizations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 80e48a42..c2660177 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -48,7 +48,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100
 
     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward

From 2e23c9c568617b4da16ca67d5bab0368ef14f68c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 4 Jun 2023 11:33:51 +0300
Subject: [PATCH 2/2] fix the broken line for #10990

---
 modules/sd_hijack_optimizations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index c2660177..49f4bd16 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -48,7 +48,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100
 
     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
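
Taken together, the two patches leave SdOptimizationXformers.is_available() gating on
an actually usable CUDA device (torch.cuda.is_available()) rather than merely a CUDA
build of torch (torch.version.cuda), and still require the device's compute capability
to fall between 6.0 and 9.0. Below is a minimal, self-contained sketch of the resulting
check; the stand-in values replacing the webui's shared module are assumptions for
illustration only and are not part of the patch.

    import torch

    # Stand-ins for the webui's `shared` module -- assumptions for
    # illustration, not part of this patch series.
    force_enable_xformers = False  # mirrors shared.cmd_opts.force_enable_xformers
    xformers_available = True      # mirrors shared.xformers_available
    device = "cuda"                # mirrors shared.device

    def xformers_is_available() -> bool:
        """The check as it stands after PATCH 2/2: xformers is usable when
        forced on, or when xformers is installed, a CUDA device is actually
        present, and its compute capability is within 6.0-9.0."""
        if force_enable_xformers:
            return True
        # `and` short-circuits, so get_device_capability() is only called
        # when torch.cuda.is_available() has already returned True.
        return (
            xformers_available
            and torch.cuda.is_available()
            and (6, 0) <= torch.cuda.get_device_capability(device) <= (9, 0)
        )

    if __name__ == "__main__":
        print("xformers usable:", xformers_is_available())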