2022-09-03 16:32:45 +00:00
|
|
|
from collections import namedtuple
|
|
|
|
from copy import copy
|
2022-10-06 10:55:21 +00:00
|
|
|
from itertools import permutations, chain
|
2022-09-03 16:32:45 +00:00
|
|
|
import random
|
2022-10-06 10:55:21 +00:00
|
|
|
import csv
|
|
|
|
from io import StringIO
|
2022-09-26 13:46:18 +00:00
|
|
|
from PIL import Image
|
2022-09-09 14:54:04 +00:00
|
|
|
import numpy as np
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
import modules.scripts as scripts
|
|
|
|
import gradio as gr
|
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
from modules import images, paths, sd_samplers, processing
|
2022-10-11 12:51:22 +00:00
|
|
|
from modules.hypernetworks import hypernetwork
|
2022-11-19 09:01:51 +00:00
|
|
|
from modules.processing import process_images, Processed, StableDiffusionProcessingTxt2Img
|
2022-09-03 16:32:45 +00:00
|
|
|
from modules.shared import opts, cmd_opts, state
|
2022-09-17 10:49:36 +00:00
|
|
|
import modules.shared as shared
|
2022-09-03 16:32:45 +00:00
|
|
|
import modules.sd_samplers
|
2022-09-17 10:49:36 +00:00
|
|
|
import modules.sd_models
|
2022-12-18 15:47:02 +00:00
|
|
|
import modules.sd_vae
|
|
|
|
import glob
|
|
|
|
import os
|
2022-09-06 07:11:25 +00:00
|
|
|
import re
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def apply_field(field):
|
|
|
|
def fun(p, x, xs):
|
|
|
|
setattr(p, field, x)
|
|
|
|
|
|
|
|
return fun
|
|
|
|
|
|
|
|
|
|
|
|
def apply_prompt(p, x, xs):
|
2022-10-11 10:59:56 +00:00
|
|
|
if xs[0] not in p.prompt and xs[0] not in p.negative_prompt:
|
|
|
|
raise RuntimeError(f"Prompt S/R did not find {xs[0]} in prompt or negative prompt.")
|
2022-10-11 10:16:57 +00:00
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
p.prompt = p.prompt.replace(xs[0], x)
|
2022-09-09 05:58:31 +00:00
|
|
|
p.negative_prompt = p.negative_prompt.replace(xs[0], x)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
2022-10-04 02:20:09 +00:00
|
|
|
def apply_order(p, x, xs):
|
|
|
|
token_order = []
|
|
|
|
|
2022-10-04 06:18:00 +00:00
|
|
|
# Initally grab the tokens from the prompt, so they can be replaced in order of earliest seen
|
2022-10-04 02:20:09 +00:00
|
|
|
for token in x:
|
|
|
|
token_order.append((p.prompt.find(token), token))
|
|
|
|
|
|
|
|
token_order.sort(key=lambda t: t[0])
|
|
|
|
|
2022-10-04 05:07:36 +00:00
|
|
|
prompt_parts = []
|
|
|
|
|
|
|
|
# Split the prompt up, taking out the tokens
|
|
|
|
for _, token in token_order:
|
|
|
|
n = p.prompt.find(token)
|
|
|
|
prompt_parts.append(p.prompt[0:n])
|
|
|
|
p.prompt = p.prompt[n + len(token):]
|
|
|
|
|
|
|
|
# Rebuild the prompt with the tokens in the order we want
|
|
|
|
prompt_tmp = ""
|
|
|
|
for idx, part in enumerate(prompt_parts):
|
|
|
|
prompt_tmp += part
|
|
|
|
prompt_tmp += x[idx]
|
|
|
|
p.prompt = prompt_tmp + p.prompt
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def apply_sampler(p, x, xs):
|
2022-11-27 10:43:10 +00:00
|
|
|
sampler_name = sd_samplers.samplers_map.get(x.lower(), None)
|
2022-11-27 10:17:39 +00:00
|
|
|
if sampler_name is None:
|
2022-09-03 16:32:45 +00:00
|
|
|
raise RuntimeError(f"Unknown sampler: {x}")
|
|
|
|
|
2022-11-27 10:17:39 +00:00
|
|
|
p.sampler_name = sampler_name
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
2022-10-09 17:20:35 +00:00
|
|
|
def confirm_samplers(p, xs):
|
|
|
|
for x in xs:
|
2022-11-27 10:43:10 +00:00
|
|
|
if x.lower() not in sd_samplers.samplers_map:
|
2022-10-09 17:20:35 +00:00
|
|
|
raise RuntimeError(f"Unknown sampler: {x}")
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
2022-09-17 10:49:36 +00:00
|
|
|
def apply_checkpoint(p, x, xs):
|
2022-09-28 21:31:53 +00:00
|
|
|
info = modules.sd_models.get_closet_checkpoint_match(x)
|
2022-10-09 17:20:35 +00:00
|
|
|
if info is None:
|
|
|
|
raise RuntimeError(f"Unknown checkpoint: {x}")
|
2022-09-17 10:49:36 +00:00
|
|
|
modules.sd_models.reload_model_weights(shared.sd_model, info)
|
2022-10-20 23:01:27 +00:00
|
|
|
p.sd_model = shared.sd_model
|
2022-09-17 10:49:36 +00:00
|
|
|
|
|
|
|
|
2022-10-09 17:20:35 +00:00
|
|
|
def confirm_checkpoints(p, xs):
|
|
|
|
for x in xs:
|
|
|
|
if modules.sd_models.get_closet_checkpoint_match(x) is None:
|
|
|
|
raise RuntimeError(f"Unknown checkpoint: {x}")
|
|
|
|
|
|
|
|
|
2022-10-07 07:17:52 +00:00
|
|
|
def apply_hypernetwork(p, x, xs):
|
2022-10-09 15:56:36 +00:00
|
|
|
if x.lower() in ["", "none"]:
|
|
|
|
name = None
|
|
|
|
else:
|
|
|
|
name = hypernetwork.find_closest_hypernetwork_name(x)
|
2022-10-09 16:37:09 +00:00
|
|
|
if not name:
|
|
|
|
raise RuntimeError(f"Unknown hypernetwork: {x}")
|
2022-10-09 15:56:36 +00:00
|
|
|
hypernetwork.load_hypernetwork(name)
|
2022-10-07 07:17:52 +00:00
|
|
|
|
|
|
|
|
2022-10-13 17:12:37 +00:00
|
|
|
def apply_hypernetwork_strength(p, x, xs):
|
|
|
|
hypernetwork.apply_strength(x)
|
|
|
|
|
|
|
|
|
2022-10-09 17:20:35 +00:00
|
|
|
def confirm_hypernetworks(p, xs):
|
2022-10-09 16:37:09 +00:00
|
|
|
for x in xs:
|
|
|
|
if x.lower() in ["", "none"]:
|
|
|
|
continue
|
|
|
|
if not hypernetwork.find_closest_hypernetwork_name(x):
|
|
|
|
raise RuntimeError(f"Unknown hypernetwork: {x}")
|
|
|
|
|
|
|
|
|
2022-10-09 15:58:55 +00:00
|
|
|
def apply_clip_skip(p, x, xs):
|
2022-10-09 18:57:17 +00:00
|
|
|
opts.data["CLIP_stop_at_last_layers"] = x
|
2022-10-09 15:58:55 +00:00
|
|
|
|
|
|
|
|
2022-12-18 15:47:02 +00:00
|
|
|
def apply_upscale_latent_space(p, x, xs):
|
|
|
|
if x.lower().strip() != '0':
|
|
|
|
opts.data["use_scale_latent_for_hires_fix"] = True
|
|
|
|
else:
|
|
|
|
opts.data["use_scale_latent_for_hires_fix"] = False
|
|
|
|
|
|
|
|
|
|
|
|
def find_vae(name: str):
|
2023-01-14 16:56:09 +00:00
|
|
|
if name.lower() in ['auto', 'automatic']:
|
|
|
|
return modules.sd_vae.unspecified
|
|
|
|
if name.lower() == 'none':
|
|
|
|
return None
|
2022-12-18 15:47:02 +00:00
|
|
|
else:
|
2023-01-14 16:56:09 +00:00
|
|
|
choices = [x for x in sorted(modules.sd_vae.vae_dict, key=lambda x: len(x)) if name.lower().strip() in x.lower()]
|
|
|
|
if len(choices) == 0:
|
|
|
|
print(f"No VAE found for {name}; using automatic")
|
|
|
|
return modules.sd_vae.unspecified
|
2022-12-18 15:47:02 +00:00
|
|
|
else:
|
2023-01-14 16:56:09 +00:00
|
|
|
return modules.sd_vae.vae_dict[choices[0]]
|
2022-12-18 15:47:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
def apply_vae(p, x, xs):
|
2023-01-14 16:56:09 +00:00
|
|
|
modules.sd_vae.reload_vae_weights(shared.sd_model, vae_file=find_vae(x))
|
2022-12-18 15:47:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
def apply_styles(p: StableDiffusionProcessingTxt2Img, x: str, _):
|
|
|
|
p.styles = x.split(',')
|
2022-09-17 10:49:36 +00:00
|
|
|
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
def format_value_add_label(p, opt, x):
|
2022-09-09 15:05:43 +00:00
|
|
|
if type(x) == float:
|
|
|
|
x = round(x, 8)
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
return f"{opt.label}: {x}"
|
|
|
|
|
|
|
|
|
|
|
|
def format_value(p, opt, x):
|
2022-09-09 15:05:43 +00:00
|
|
|
if type(x) == float:
|
|
|
|
x = round(x, 8)
|
2022-09-03 16:32:45 +00:00
|
|
|
return x
|
|
|
|
|
2022-10-04 06:18:00 +00:00
|
|
|
|
|
|
|
def format_value_join_list(p, opt, x):
|
|
|
|
return ", ".join(x)
|
|
|
|
|
|
|
|
|
2022-09-09 14:54:04 +00:00
|
|
|
def do_nothing(p, x, xs):
|
|
|
|
pass
|
|
|
|
|
2022-10-04 06:18:00 +00:00
|
|
|
|
2022-09-09 14:54:04 +00:00
|
|
|
def format_nothing(p, opt, x):
|
|
|
|
return ""
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-04 06:18:00 +00:00
|
|
|
def str_permutations(x):
|
|
|
|
"""dummy function for specifying it in AxisOption's type when you want to get a list of permutations"""
|
|
|
|
return x
|
|
|
|
|
2022-10-09 17:20:35 +00:00
|
|
|
AxisOption = namedtuple("AxisOption", ["label", "type", "apply", "format_value", "confirm"])
|
|
|
|
AxisOptionImg2Img = namedtuple("AxisOptionImg2Img", ["label", "type", "apply", "format_value", "confirm"])
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
axis_options = [
|
2022-10-09 17:20:35 +00:00
|
|
|
AxisOption("Nothing", str, do_nothing, format_nothing, None),
|
|
|
|
AxisOption("Seed", int, apply_field("seed"), format_value_add_label, None),
|
|
|
|
AxisOption("Var. seed", int, apply_field("subseed"), format_value_add_label, None),
|
|
|
|
AxisOption("Var. strength", float, apply_field("subseed_strength"), format_value_add_label, None),
|
|
|
|
AxisOption("Steps", int, apply_field("steps"), format_value_add_label, None),
|
|
|
|
AxisOption("CFG Scale", float, apply_field("cfg_scale"), format_value_add_label, None),
|
|
|
|
AxisOption("Prompt S/R", str, apply_prompt, format_value, None),
|
|
|
|
AxisOption("Prompt order", str_permutations, apply_order, format_value_join_list, None),
|
|
|
|
AxisOption("Sampler", str, apply_sampler, format_value, confirm_samplers),
|
|
|
|
AxisOption("Checkpoint name", str, apply_checkpoint, format_value, confirm_checkpoints),
|
|
|
|
AxisOption("Hypernetwork", str, apply_hypernetwork, format_value, confirm_hypernetworks),
|
2022-10-13 17:12:37 +00:00
|
|
|
AxisOption("Hypernet str.", float, apply_hypernetwork_strength, format_value_add_label, None),
|
2022-10-09 17:20:35 +00:00
|
|
|
AxisOption("Sigma Churn", float, apply_field("s_churn"), format_value_add_label, None),
|
|
|
|
AxisOption("Sigma min", float, apply_field("s_tmin"), format_value_add_label, None),
|
|
|
|
AxisOption("Sigma max", float, apply_field("s_tmax"), format_value_add_label, None),
|
|
|
|
AxisOption("Sigma noise", float, apply_field("s_noise"), format_value_add_label, None),
|
|
|
|
AxisOption("Eta", float, apply_field("eta"), format_value_add_label, None),
|
|
|
|
AxisOption("Clip skip", int, apply_clip_skip, format_value_add_label, None),
|
2022-10-13 16:49:58 +00:00
|
|
|
AxisOption("Denoising", float, apply_field("denoising_strength"), format_value_add_label, None),
|
2023-01-02 16:42:10 +00:00
|
|
|
AxisOption("Hires upscaler", str, apply_field("hr_upscaler"), format_value_add_label, None),
|
2022-10-25 20:15:08 +00:00
|
|
|
AxisOption("Cond. Image Mask Weight", float, apply_field("inpainting_mask_weight"), format_value_add_label, None),
|
2022-12-18 15:47:02 +00:00
|
|
|
AxisOption("VAE", str, apply_vae, format_value_add_label, None),
|
|
|
|
AxisOption("Styles", str, apply_styles, format_value_add_label, None),
|
2022-09-03 16:32:45 +00:00
|
|
|
]
|
|
|
|
|
|
|
|
|
2022-10-12 00:05:20 +00:00
|
|
|
def draw_xy_grid(p, xs, ys, x_labels, y_labels, cell, draw_legend, include_lone_images):
|
2022-09-17 11:55:40 +00:00
|
|
|
ver_texts = [[images.GridAnnotation(y)] for y in y_labels]
|
|
|
|
hor_texts = [[images.GridAnnotation(x)] for x in x_labels]
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 23:12:12 +00:00
|
|
|
# Temporary list of all the images that are generated to be populated into the grid.
|
|
|
|
# Will be filled with empty images for any individual step that fails to process properly
|
|
|
|
image_cache = []
|
|
|
|
|
|
|
|
processed_result = None
|
|
|
|
cell_mode = "P"
|
|
|
|
cell_size = (1,1)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-09-14 10:08:05 +00:00
|
|
|
state.job_count = len(xs) * len(ys) * p.n_iter
|
2022-09-05 23:09:01 +00:00
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
for iy, y in enumerate(ys):
|
|
|
|
for ix, x in enumerate(xs):
|
2022-09-24 05:23:01 +00:00
|
|
|
state.job = f"{ix + iy * len(xs) + 1} out of {len(xs) * len(ys)}"
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 23:12:12 +00:00
|
|
|
processed:Processed = cell(x, y)
|
2022-09-26 13:46:18 +00:00
|
|
|
try:
|
2022-10-12 23:12:12 +00:00
|
|
|
# this dereference will throw an exception if the image was not processed
|
|
|
|
# (this happens in cases such as if the user stops the process from the UI)
|
|
|
|
processed_image = processed.images[0]
|
|
|
|
|
|
|
|
if processed_result is None:
|
|
|
|
# Use our first valid processed result as a template container to hold our full results
|
|
|
|
processed_result = copy(processed)
|
|
|
|
cell_mode = processed_image.mode
|
|
|
|
cell_size = processed_image.size
|
|
|
|
processed_result.images = [Image.new(cell_mode, cell_size)]
|
|
|
|
|
|
|
|
image_cache.append(processed_image)
|
|
|
|
if include_lone_images:
|
|
|
|
processed_result.images.append(processed_image)
|
|
|
|
processed_result.all_prompts.append(processed.prompt)
|
|
|
|
processed_result.all_seeds.append(processed.seed)
|
|
|
|
processed_result.infotexts.append(processed.infotexts[0])
|
2022-09-26 13:46:18 +00:00
|
|
|
except:
|
2022-10-12 23:12:12 +00:00
|
|
|
image_cache.append(Image.new(cell_mode, cell_size))
|
|
|
|
|
|
|
|
if not processed_result:
|
|
|
|
print("Unexpected error: draw_xy_grid failed to return even a single processed image")
|
|
|
|
return Processed()
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 23:12:12 +00:00
|
|
|
grid = images.image_grid(image_cache, rows=len(ys))
|
2022-09-14 12:01:16 +00:00
|
|
|
if draw_legend:
|
2022-10-12 23:12:12 +00:00
|
|
|
grid = images.draw_grid_annotations(grid, cell_size[0], cell_size[1], hor_texts, ver_texts)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 23:12:12 +00:00
|
|
|
processed_result.images[0] = grid
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 23:12:12 +00:00
|
|
|
return processed_result
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
2022-10-16 19:10:07 +00:00
|
|
|
class SharedSettingsStackHelper(object):
|
|
|
|
def __enter__(self):
|
|
|
|
self.CLIP_stop_at_last_layers = opts.CLIP_stop_at_last_layers
|
|
|
|
self.hypernetwork = opts.sd_hypernetwork
|
|
|
|
self.model = shared.sd_model
|
2022-12-18 15:47:02 +00:00
|
|
|
self.vae = opts.sd_vae
|
2022-10-16 19:10:07 +00:00
|
|
|
|
|
|
|
def __exit__(self, exc_type, exc_value, tb):
|
|
|
|
modules.sd_models.reload_model_weights(self.model)
|
2023-01-14 16:56:09 +00:00
|
|
|
|
|
|
|
opts.data["sd_vae"] = self.vae
|
|
|
|
modules.sd_vae.reload_vae_weights(self.model)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-16 19:10:07 +00:00
|
|
|
hypernetwork.load_hypernetwork(self.hypernetwork)
|
|
|
|
hypernetwork.apply_strength()
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-16 19:10:07 +00:00
|
|
|
opts.data["CLIP_stop_at_last_layers"] = self.CLIP_stop_at_last_layers
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
|
2022-09-06 07:11:25 +00:00
|
|
|
re_range = re.compile(r"\s*([+-]?\s*\d+)\s*-\s*([+-]?\s*\d+)(?:\s*\(([+-]\d+)\s*\))?\s*")
|
2022-09-09 14:54:04 +00:00
|
|
|
re_range_float = re.compile(r"\s*([+-]?\s*\d+(?:.\d*)?)\s*-\s*([+-]?\s*\d+(?:.\d*)?)(?:\s*\(([+-]\d+(?:.\d*)?)\s*\))?\s*")
|
2022-09-06 07:11:25 +00:00
|
|
|
|
2022-09-14 11:56:26 +00:00
|
|
|
re_range_count = re.compile(r"\s*([+-]?\s*\d+)\s*-\s*([+-]?\s*\d+)(?:\s*\[(\d+)\s*\])?\s*")
|
|
|
|
re_range_count_float = re.compile(r"\s*([+-]?\s*\d+(?:.\d*)?)\s*-\s*([+-]?\s*\d+(?:.\d*)?)(?:\s*\[(\d+(?:.\d*)?)\s*\])?\s*")
|
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
class Script(scripts.Script):
|
|
|
|
def title(self):
|
|
|
|
return "X/Y plot"
|
|
|
|
|
|
|
|
def ui(self, is_img2img):
|
|
|
|
current_axis_options = [x for x in axis_options if type(x) == AxisOption or type(x) == AxisOptionImg2Img and is_img2img]
|
|
|
|
|
|
|
|
with gr.Row():
|
2023-01-05 08:29:07 +00:00
|
|
|
x_type = gr.Dropdown(label="X type", choices=[x.label for x in current_axis_options], value=current_axis_options[1].label, type="index", elem_id=self.elem_id("x_type"))
|
|
|
|
x_values = gr.Textbox(label="X values", lines=1, elem_id=self.elem_id("x_values"))
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
with gr.Row():
|
2023-01-05 08:29:07 +00:00
|
|
|
y_type = gr.Dropdown(label="Y type", choices=[x.label for x in current_axis_options], value=current_axis_options[0].label, type="index", elem_id=self.elem_id("y_type"))
|
|
|
|
y_values = gr.Textbox(label="Y values", lines=1, elem_id=self.elem_id("y_values"))
|
2022-09-14 12:01:16 +00:00
|
|
|
|
2023-01-05 08:29:07 +00:00
|
|
|
draw_legend = gr.Checkbox(label='Draw legend', value=True, elem_id=self.elem_id("draw_legend"))
|
|
|
|
include_lone_images = gr.Checkbox(label='Include Separate Images', value=False, elem_id=self.elem_id("include_lone_images"))
|
|
|
|
no_fixed_seeds = gr.Checkbox(label='Keep -1 for seeds', value=False, elem_id=self.elem_id("no_fixed_seeds"))
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-10-12 00:05:20 +00:00
|
|
|
return [x_type, x_values, y_type, y_values, draw_legend, include_lone_images, no_fixed_seeds]
|
2022-09-24 05:09:59 +00:00
|
|
|
|
2022-10-12 00:05:20 +00:00
|
|
|
def run(self, p, x_type, x_values, y_type, y_values, draw_legend, include_lone_images, no_fixed_seeds):
|
2022-10-06 23:31:36 +00:00
|
|
|
if not no_fixed_seeds:
|
|
|
|
modules.processing.fix_seed(p)
|
|
|
|
|
2022-10-10 18:24:11 +00:00
|
|
|
if not opts.return_grid:
|
|
|
|
p.batch_size = 1
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
def process_axis(opt, vals):
|
2022-09-29 18:16:12 +00:00
|
|
|
if opt.label == 'Nothing':
|
|
|
|
return [0]
|
|
|
|
|
2022-10-06 17:16:21 +00:00
|
|
|
valslist = [x.strip() for x in chain.from_iterable(csv.reader(StringIO(vals)))]
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
if opt.type == int:
|
|
|
|
valslist_ext = []
|
|
|
|
|
|
|
|
for val in valslist:
|
2022-09-06 07:11:25 +00:00
|
|
|
m = re_range.fullmatch(val)
|
2022-09-14 11:56:26 +00:00
|
|
|
mc = re_range_count.fullmatch(val)
|
2022-09-06 07:11:25 +00:00
|
|
|
if m is not None:
|
|
|
|
start = int(m.group(1))
|
|
|
|
end = int(m.group(2))+1
|
|
|
|
step = int(m.group(3)) if m.group(3) is not None else 1
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
valslist_ext += list(range(start, end, step))
|
2022-09-14 11:56:26 +00:00
|
|
|
elif mc is not None:
|
|
|
|
start = int(mc.group(1))
|
|
|
|
end = int(mc.group(2))
|
|
|
|
num = int(mc.group(3)) if mc.group(3) is not None else 1
|
|
|
|
|
2022-09-24 05:23:01 +00:00
|
|
|
valslist_ext += [int(x) for x in np.linspace(start=start, stop=end, num=num).tolist()]
|
2022-09-03 16:32:45 +00:00
|
|
|
else:
|
|
|
|
valslist_ext.append(val)
|
|
|
|
|
|
|
|
valslist = valslist_ext
|
2022-09-09 14:54:04 +00:00
|
|
|
elif opt.type == float:
|
|
|
|
valslist_ext = []
|
|
|
|
|
|
|
|
for val in valslist:
|
|
|
|
m = re_range_float.fullmatch(val)
|
2022-09-14 11:56:26 +00:00
|
|
|
mc = re_range_count_float.fullmatch(val)
|
2022-09-09 14:54:04 +00:00
|
|
|
if m is not None:
|
|
|
|
start = float(m.group(1))
|
|
|
|
end = float(m.group(2))
|
|
|
|
step = float(m.group(3)) if m.group(3) is not None else 1
|
|
|
|
|
|
|
|
valslist_ext += np.arange(start, end + step, step).tolist()
|
2022-09-14 11:56:26 +00:00
|
|
|
elif mc is not None:
|
|
|
|
start = float(mc.group(1))
|
|
|
|
end = float(mc.group(2))
|
|
|
|
num = int(mc.group(3)) if mc.group(3) is not None else 1
|
|
|
|
|
2022-09-24 05:23:01 +00:00
|
|
|
valslist_ext += np.linspace(start=start, stop=end, num=num).tolist()
|
2022-09-09 14:54:04 +00:00
|
|
|
else:
|
|
|
|
valslist_ext.append(val)
|
|
|
|
|
|
|
|
valslist = valslist_ext
|
2022-10-04 06:18:00 +00:00
|
|
|
elif opt.type == str_permutations:
|
|
|
|
valslist = list(permutations(valslist))
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
valslist = [opt.type(x) for x in valslist]
|
|
|
|
|
2022-10-08 05:30:49 +00:00
|
|
|
# Confirm options are valid before starting
|
2022-10-09 17:20:35 +00:00
|
|
|
if opt.confirm:
|
|
|
|
opt.confirm(p, valslist)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
|
|
|
return valslist
|
|
|
|
|
|
|
|
x_opt = axis_options[x_type]
|
|
|
|
xs = process_axis(x_opt, x_values)
|
|
|
|
|
|
|
|
y_opt = axis_options[y_type]
|
|
|
|
ys = process_axis(y_opt, y_values)
|
|
|
|
|
2022-09-24 05:09:59 +00:00
|
|
|
def fix_axis_seeds(axis_opt, axis_list):
|
2023-01-04 16:19:11 +00:00
|
|
|
if axis_opt.label in ['Seed', 'Var. seed']:
|
2022-09-24 05:09:59 +00:00
|
|
|
return [int(random.randrange(4294967294)) if val is None or val == '' or val == -1 else val for val in axis_list]
|
|
|
|
else:
|
|
|
|
return axis_list
|
|
|
|
|
2022-09-24 05:23:01 +00:00
|
|
|
if not no_fixed_seeds:
|
2022-09-24 05:09:59 +00:00
|
|
|
xs = fix_axis_seeds(x_opt, xs)
|
|
|
|
ys = fix_axis_seeds(y_opt, ys)
|
|
|
|
|
|
|
|
if x_opt.label == 'Steps':
|
|
|
|
total_steps = sum(xs) * len(ys)
|
|
|
|
elif y_opt.label == 'Steps':
|
|
|
|
total_steps = sum(ys) * len(xs)
|
|
|
|
else:
|
|
|
|
total_steps = p.steps * len(xs) * len(ys)
|
|
|
|
|
2022-10-14 21:26:38 +00:00
|
|
|
if isinstance(p, StableDiffusionProcessingTxt2Img) and p.enable_hr:
|
|
|
|
total_steps *= 2
|
|
|
|
|
2022-09-24 05:23:01 +00:00
|
|
|
print(f"X/Y plot will create {len(xs) * len(ys) * p.n_iter} images on a {len(xs)}x{len(ys)} grid. (Total steps to process: {total_steps * p.n_iter})")
|
2022-09-24 05:09:59 +00:00
|
|
|
shared.total_tqdm.updateTotal(total_steps * p.n_iter)
|
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
grid_infotext = [None]
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
def cell(x, y):
|
2023-01-16 03:43:34 +00:00
|
|
|
if shared.state.interrupted:
|
|
|
|
return Processed(p, [], p.seed, "")
|
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
pc = copy(p)
|
|
|
|
x_opt.apply(pc, x, xs)
|
|
|
|
y_opt.apply(pc, y, ys)
|
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
res = process_images(pc)
|
|
|
|
|
|
|
|
if grid_infotext[0] is None:
|
|
|
|
pc.extra_generation_params = copy(pc.extra_generation_params)
|
|
|
|
|
|
|
|
if x_opt.label != 'Nothing':
|
|
|
|
pc.extra_generation_params["X Type"] = x_opt.label
|
|
|
|
pc.extra_generation_params["X Values"] = x_values
|
|
|
|
if x_opt.label in ["Seed", "Var. seed"] and not no_fixed_seeds:
|
|
|
|
pc.extra_generation_params["Fixed X Values"] = ", ".join([str(x) for x in xs])
|
|
|
|
|
|
|
|
if y_opt.label != 'Nothing':
|
|
|
|
pc.extra_generation_params["Y Type"] = y_opt.label
|
|
|
|
pc.extra_generation_params["Y Values"] = y_values
|
|
|
|
if y_opt.label in ["Seed", "Var. seed"] and not no_fixed_seeds:
|
|
|
|
pc.extra_generation_params["Fixed Y Values"] = ", ".join([str(y) for y in ys])
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
grid_infotext[0] = processing.create_infotext(pc, pc.all_prompts, pc.all_seeds, pc.all_subseeds)
|
2022-10-01 05:02:29 +00:00
|
|
|
|
2023-01-04 16:19:11 +00:00
|
|
|
return res
|
2022-10-01 05:02:29 +00:00
|
|
|
|
2022-10-16 19:10:07 +00:00
|
|
|
with SharedSettingsStackHelper():
|
|
|
|
processed = draw_xy_grid(
|
|
|
|
p,
|
|
|
|
xs=xs,
|
|
|
|
ys=ys,
|
|
|
|
x_labels=[x_opt.format_value(p, x_opt, x) for x in xs],
|
|
|
|
y_labels=[y_opt.format_value(p, y_opt, y) for y in ys],
|
|
|
|
cell=cell,
|
|
|
|
draw_legend=draw_legend,
|
|
|
|
include_lone_images=include_lone_images
|
|
|
|
)
|
2022-09-03 16:32:45 +00:00
|
|
|
|
2022-09-04 00:38:24 +00:00
|
|
|
if opts.grid_save:
|
2023-01-04 16:19:11 +00:00
|
|
|
images.save_image(processed.images[0], p.outpath_grids, "xy_grid", info=grid_infotext[0], extension=opts.grid_format, prompt=p.prompt, seed=processed.seed, grid=True, p=p)
|
2022-09-17 10:49:36 +00:00
|
|
|
|
2022-09-03 16:32:45 +00:00
|
|
|
return processed
|