From 20f8ec877a99ce2ebf193cb1e2e773cfc77b7c41 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Thu, 6 Oct 2022 00:09:32 +0300
Subject: [PATCH] remove type annotations in new code because presumably they don't work in 3.7

---
 modules/prompt_parser.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/prompt_parser.py b/modules/prompt_parser.py
index 800b12c7..ee4c5d02 100644
--- a/modules/prompt_parser.py
+++ b/modules/prompt_parser.py
@@ -175,14 +175,14 @@ def get_multicond_prompt_list(prompts):
 
 class ComposableScheduledPromptConditioning:
     def __init__(self, schedules, weight=1.0):
-        self.schedules: list[ScheduledPromptConditioning] = schedules
+        self.schedules = schedules  # : list[ScheduledPromptConditioning]
         self.weight: float = weight
 
 
 class MulticondLearnedConditioning:
     def __init__(self, shape, batch):
         self.shape: tuple = shape  # the shape field is needed to send this object to DDIM/PLMS
-        self.batch: list[list[ComposableScheduledPromptConditioning]] = batch
+        self.batch = batch  # : list[list[ComposableScheduledPromptConditioning]]
 
 
 def get_multicond_learned_conditioning(model, prompts, steps) -> MulticondLearnedConditioning:
@@ -203,7 +203,7 @@ def get_multicond_learned_conditioning(model, prompts, steps) -> MulticondLearne
     return MulticondLearnedConditioning(shape=(len(prompts),), batch=res)
 
 
-def reconstruct_cond_batch(c: list[list[ScheduledPromptConditioning]], current_step):
+def reconstruct_cond_batch(c, current_step):  # c: list[list[ScheduledPromptConditioning]]
     param = c[0][0].cond
     res = torch.zeros((len(c),) + param.shape, device=param.device, dtype=param.dtype)
     for i, cond_schedule in enumerate(c):
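
Background on the failure this patch works around, with a standalone sketch (the class stub and function body below are illustrative, not taken from modules/prompt_parser.py): parameter and return annotations are evaluated when the def statement runs, and subscripting the built-in list (PEP 585 generics such as list[ScheduledPromptConditioning]) is only supported from Python 3.9, so on 3.7/3.8 importing the module raises "TypeError: 'type' object is not subscriptable". Turning the annotations into comments, as the patch does, sidesteps that; typing.List or from __future__ import annotations (PEP 563, available since 3.7) would be alternative ways to keep type information.

    # Minimal sketch, assuming Python 3.7+.  ScheduledPromptConditioning is a
    # stand-in stub; only the annotation mechanics matter here.
    from __future__ import annotations  # PEP 563: annotations stay unevaluated strings

    from typing import List  # typing.List is subscriptable on 3.7/3.8, unlike built-in list


    class ScheduledPromptConditioning:  # stub for illustration
        pass


    # Same shape of signature as the one the patch removed, but import-safe on 3.7:
    # typing.List works even without the __future__ import, and with the __future__
    # import even list[...] would be accepted, because the annotation is no longer
    # evaluated when the def executes.
    def reconstruct_cond_batch(c: List[List[ScheduledPromptConditioning]], current_step):
        # body elided; only the signature matters for the 3.7 compatibility point
        return c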