first

parent 22bcc7be42
commit 56680cd84a
@@ -238,12 +238,14 @@ def prepare_environment():
     k_diffusion_repo = os.environ.get('K_DIFFUSION_REPO', 'https://github.com/crowsonkb/k-diffusion.git')
     codeformer_repo = os.environ.get('CODEFORMER_REPO', 'https://github.com/sczhou/CodeFormer.git')
     blip_repo = os.environ.get('BLIP_REPO', 'https://github.com/salesforce/BLIP.git')
+    tomesd_repo = os.environ.get('TOMESD_REPO', 'https://github.com/dbolya/tomesd.git')
 
     stable_diffusion_commit_hash = os.environ.get('STABLE_DIFFUSION_COMMIT_HASH', "cf1d67a6fd5ea1aa600c4df58e5b47da45f6bdbf")
     taming_transformers_commit_hash = os.environ.get('TAMING_TRANSFORMERS_COMMIT_HASH', "24268930bf1dce879235a7fddd0b2355b84d7ea6")
     k_diffusion_commit_hash = os.environ.get('K_DIFFUSION_COMMIT_HASH', "5b3af030dd83e0297272d861c19477735d0317ec")
     codeformer_commit_hash = os.environ.get('CODEFORMER_COMMIT_HASH', "c5b4593074ba6214284d6acd5f1719b6c5d739af")
     blip_commit_hash = os.environ.get('BLIP_COMMIT_HASH', "48211a1594f1321b00f14c9f7a5b4813144b2fb9")
+    tomesd_commit_hash = os.environ.get('TOMESD_COMMIT_HASH', "4f936c257e10848e0399fc6d0484a1761812092a")
 
     if not args.skip_python_version_check:
         check_python_version()
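The two added lines follow the same pattern as the existing dependencies: an environment-variable override with a pinned default. A hedged usage sketch (overriding is optional; without it the defaults above are used):

import os

# Hypothetical override: point the launcher at a specific tomesd revision
# before prepare_environment() reads these variables with os.environ.get().
os.environ["TOMESD_REPO"] = "https://github.com/dbolya/tomesd.git"
os.environ["TOMESD_COMMIT_HASH"] = "4f936c257e10848e0399fc6d0484a1761812092a"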
@@ -280,6 +282,10 @@ def prepare_environment():
         elif platform.system() == "Linux":
             run_pip(f"install {xformers_package}", "xformers")
 
+    if (not is_installed("tomesd") or args.reinstall_tomesd) and args.token_merging:
+        git_clone(tomesd_repo, repo_dir('tomesd'), "tomesd", tomesd_commit_hash)
+        run_pip(f"install {repo_dir('tomesd')}")
+
     if not is_installed("pyngrok") and args.ngrok:
         run_pip("install pyngrok", "ngrok")
 
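The new block clones the pinned tomesd revision and pip-installs the local checkout, but only when --token-merging is passed and the package is not already present. A rough standalone equivalent, assuming git and pip are on PATH (is_installed here is a simplified stand-in for the launcher's helper, not the real one):

import importlib.util
import subprocess
import sys

def is_installed(package: str) -> bool:
    # Simplified stand-in: the package counts as installed if its module resolves.
    return importlib.util.find_spec(package) is not None

if not is_installed("tomesd"):
    # Clone, pin to the same commit hash as the diff, then install the checkout.
    subprocess.run(["git", "clone", "https://github.com/dbolya/tomesd.git", "tomesd"], check=True)
    subprocess.run(["git", "-C", "tomesd", "checkout", "4f936c257e10848e0399fc6d0484a1761812092a"], check=True)
    subprocess.run([sys.executable, "-m", "pip", "install", "./tomesd"], check=True)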
@@ -101,3 +101,7 @@ parser.add_argument("--no-gradio-queue", action='store_true', help="Disables gra
 parser.add_argument("--skip-version-check", action='store_true', help="Do not check versions of torch and xformers")
 parser.add_argument("--no-hashing", action='store_true', help="disable sha256 hashing of checkpoints to help loading performance", default=False)
 parser.add_argument("--no-download-sd-model", action='store_true', help="don't download SD1.5 model even if no model is found in --ckpt-dir", default=False)
+
+# token merging / tomesd
+parser.add_argument("--token-merging", action='store_true', help="Provides generation speedup by merging redundant tokens. (compatible with --xformers)", default=False)
+parser.add_argument("--token-merging-ratio", type=float, help="Adjusts ratio of merged to untouched tokens. Range: (0.0-1.0], Defaults to 0.5", default=0.5)
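Both flags land in the webui's shared argument parser: --token-merging is a plain on/off switch, and --token-merging-ratio is a float whose documented range (0.0-1.0] is not enforced by argparse itself. A minimal isolated sketch of how they parse, outside the webui:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--token-merging", action='store_true', default=False)
parser.add_argument("--token-merging-ratio", type=float, default=0.5)

# e.g. `--token-merging --token-merging-ratio 0.3` enables merging at a 0.3 ratio
opts = parser.parse_args(["--token-merging", "--token-merging-ratio", "0.3"])
print(opts.token_merging, opts.token_merging_ratio)  # True 0.3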
@@ -9,6 +9,7 @@ from omegaconf import OmegaConf
 from os import mkdir
 from urllib import request
 import ldm.modules.midas as midas
+import tomesd
 
 from ldm.util import instantiate_from_config
 
@@ -430,6 +431,13 @@ def load_model(checkpoint_info=None, already_loaded_state_dict=None, time_taken_
     try:
         with sd_disable_initialization.DisableInitialization(disable_clip=clip_is_included_into_sd):
             sd_model = instantiate_from_config(sd_config.model)
+
+        if shared.cmd_opts.token_merging:
+            ratio = shared.cmd_opts.token_merging_ratio
+
+            tomesd.apply_patch(sd_model, ratio=ratio)
+            print(f"Model accelerated using {(ratio * 100)}% token merging via tomesd.")
+            timer.record("token merging")
     except Exception as e:
         pass
 
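The patch is applied right after the model is instantiated in load_model(), and only when --token-merging was passed. A hedged sketch of the same step outside the webui (the function name is illustrative; sd_model stands in for an already-loaded Stable Diffusion model, which apply_patch modifies in place):

import tomesd

def accelerate_with_token_merging(sd_model, ratio: float = 0.5):
    # Patch the loaded model in place; ratio mirrors --token-merging-ratio.
    tomesd.apply_patch(sd_model, ratio=ratio)
    print(f"Model accelerated using {ratio * 100}% token merging via tomesd.")
    # tomesd also documents remove_patch() to undo the modification:
    # tomesd.remove_patch(sd_model)
    return sd_model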