From 615b2fc9ce8cb0c61424aa03655f82209f425d21 Mon Sep 17 00:00:00 2001
From: guaneec
Date: Sun, 25 Sep 2022 14:13:03 +0800
Subject: [PATCH] Fix token max length

---
 modules/sd_hijack.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 62ba9101..ccbaa9ad 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -300,7 +300,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         remade_batch_tokens = []
         id_start = self.wrapped.tokenizer.bos_token_id
         id_end = self.wrapped.tokenizer.eos_token_id
-        maxlen = self.wrapped.max_length - 2
+        maxlen = self.wrapped.max_length
         used_custom_terms = []
 
         cache = {}
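
Illustrative note (not part of the patch): assuming the truncation code further down in FrozenCLIPEmbedderWithCustomWords already reserves two slots for the BOS/EOS tokens (i.e. it works with `maxlen - 2`), initializing `maxlen` as `max_length - 2` subtracted those slots twice and shortened the usable prompt budget. The sketch below models that budget under this assumption; the helper name and the token ids are illustrative, not taken from the repository.

```python
# Minimal sketch of the token budget, assuming the downstream logic reserves
# two positions for BOS/EOS out of a CLIP-style max_length of 77.

def build_chunk(prompt_token_ids, max_length=77, id_start=49406, id_end=49407):
    """Pad/truncate one prompt into a fixed-size chunk with BOS/EOS markers."""
    maxlen = max_length                      # after the fix: the full 77
    usable = maxlen - 2                      # downstream code reserves 2 slots
    tokens = prompt_token_ids[:usable]       # keep at most 75 prompt tokens
    tokens = tokens + [id_end] * (usable - len(tokens))   # pad short prompts
    return [id_start] + tokens + [id_end]    # final length == max_length

# With the old `maxlen = max_length - 2`, `usable` became 73 instead of 75,
# silently dropping the last two tokens of a full-length prompt.
assert len(build_chunk(list(range(100)))) == 77
```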