option to pad prompt/neg prompt to be same length
commit 3366e494a1
parent 8faac8b963
@@ -508,6 +508,11 @@ def load_model(checkpoint_info=None, already_loaded_state_dict=None):
     timer.record("scripts callbacks")
 
+    with devices.autocast(), torch.no_grad():
+        sd_model.cond_stage_model_empty_prompt = sd_model.cond_stage_model([""])
+
+    timer.record("calculate empty prompt")
+
     print(f"Model loaded in {timer.summary()}.")
 
     return sd_model
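The hunk above encodes the empty prompt "" once at model-load time and caches it on the model, so samplers can later reuse it as padding material instead of re-encoding it every step. A minimal self-contained sketch of that caching idea, using a stand-in text encoder and made-up CLIP-like shapes (77 tokens by 768 dims; the real shapes depend on the checkpoint's text encoder):

import torch

class FakeCondStageModel(torch.nn.Module):
    """Stand-in text encoder: returns one 77x768 conditioning chunk per prompt."""
    def forward(self, prompts):
        return torch.zeros((len(prompts), 77, 768))

cond_stage_model = FakeCondStageModel()

# Computed once at load time (under autocast/no_grad in the real code) so that
# samplers can reuse it later for padding rather than re-encoding "" each step.
cond_stage_model_empty_prompt = cond_stage_model([""])
print(cond_stage_model_empty_prompt.shape)  # torch.Size([1, 77, 768])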
@@ -125,6 +125,16 @@ class CFGDenoiser(torch.nn.Module):
             x_in = x_in[:-batch_size]
             sigma_in = sigma_in[:-batch_size]
 
+        # TODO add infotext entry
+        if shared.opts.pad_cond_uncond and tensor.shape[1] != uncond.shape[1]:
+            empty = shared.sd_model.cond_stage_model_empty_prompt
+            num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]
+
+            if num_repeats < 0:
+                tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
+            elif num_repeats > 0:
+                uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
+
         if tensor.shape[1] == uncond.shape[1] or skip_uncond:
             if is_edit_model:
                 cond_in = torch.cat([tensor, uncond, uncond])
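The hunk above pads whichever of the two conditioning tensors is shorter (prompt or negative prompt) by repeating the cached empty-prompt chunk until both have the same token length, which lets the denoiser batch them together. Because the webui encodes prompts in 77-token chunks, the length difference is an exact multiple of the empty chunk's length, so the integer division is exact. A worked, self-contained sketch of the arithmetic with hypothetical shapes (prompt = two 77-token chunks, negative prompt = one):

import torch

# Hypothetical shapes: 77-token chunks, 768-dim embeddings, batch of 2.
empty = torch.zeros((1, 77, 768))     # stands in for cond_stage_model_empty_prompt
tensor = torch.zeros((2, 154, 768))   # prompt conditioning: two chunks
uncond = torch.zeros((2, 77, 768))    # negative prompt conditioning: one chunk

# Positive => uncond is shorter; negative => tensor is shorter.
num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]  # = 1 here

if num_repeats < 0:
    tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
elif num_repeats > 0:
    uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)

assert tensor.shape[1] == uncond.shape[1] == 154  # both padded to the same length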
@@ -423,6 +423,7 @@ options_templates.update(options_section(('optimizations', "Optimizations"), {
     "token_merging_ratio": OptionInfo(0.0, "Token merging ratio", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}).link("PR", "https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/9256").info("0=disable, higher=faster"),
     "token_merging_ratio_img2img": OptionInfo(0.0, "Token merging ratio for img2img", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}).info("only applies if non-zero and overrides above"),
     "token_merging_ratio_hr": OptionInfo(0.0, "Token merging ratio for high-res pass", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}).info("only applies if non-zero and overrides above"),
+    "pad_cond_uncond": OptionInfo(False, "Pad prompt/negative prompt to be same length").info("improves performance when prompt and negative prompt have different lengths; changes seeds"),
 }))
 
options_templates.update(options_section(('compatibility', "Compatibility"), {
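The new "pad_cond_uncond" entry surfaces the feature in the Optimizations section of Settings; it defaults to off because the padding changes seeds. A hedged sketch of flipping it from a script running inside the webui process, assuming the shared.opts Options object exposes the setting by key:

# Assumes modules.shared has already been initialized by the webui.
from modules import shared

# Equivalent to ticking "Pad prompt/negative prompt to be same length"
# in the Optimizations settings section; note that enabling it changes seeds.
shared.opts.set("pad_cond_uncond", True)
print(shared.opts.pad_cond_uncond)  # True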