Merge pull request #11201 from akx/ruff-upg

Upgrade Ruff to 0.0.272
AUTOMATIC1111 2023-06-27 09:19:55 +03:00 committed by GitHub
commit d4f9250c5a
2 changed files with 45 additions and 5 deletions


@@ -18,7 +18,7 @@ jobs:
           # not to have GHA download an (at the time of writing) 4 GB cache
           # of PyTorch and other dependencies.
       - name: Install Ruff
-        run: pip install ruff==0.0.265
+        run: pip install ruff==0.0.272
       - name: Run Ruff
         run: ruff .
   lint-js:


@@ -2,11 +2,51 @@ import datetime
 import json
 import os
 
-saved_params_shared = {"model_name", "model_hash", "initial_step", "num_of_dataset_images", "learn_rate", "batch_size", "clip_grad_mode", "clip_grad_value", "gradient_step", "data_root", "log_directory", "training_width", "training_height", "steps", "create_image_every", "template_file", "gradient_step", "latent_sampling_method"}
-saved_params_ti = {"embedding_name", "num_vectors_per_token", "save_embedding_every", "save_image_with_stored_embedding"}
-saved_params_hypernet = {"hypernetwork_name", "layer_structure", "activation_func", "weight_init", "add_layer_norm", "use_dropout", "save_hypernetwork_every"}
+saved_params_shared = {
+    "batch_size",
+    "clip_grad_mode",
+    "clip_grad_value",
+    "create_image_every",
+    "data_root",
+    "gradient_step",
+    "initial_step",
+    "latent_sampling_method",
+    "learn_rate",
+    "log_directory",
+    "model_hash",
+    "model_name",
+    "num_of_dataset_images",
+    "steps",
+    "template_file",
+    "training_height",
+    "training_width",
+}
+saved_params_ti = {
+    "embedding_name",
+    "num_vectors_per_token",
+    "save_embedding_every",
+    "save_image_with_stored_embedding",
+}
+saved_params_hypernet = {
+    "activation_func",
+    "add_layer_norm",
+    "hypernetwork_name",
+    "layer_structure",
+    "save_hypernetwork_every",
+    "use_dropout",
+    "weight_init",
+}
 saved_params_all = saved_params_shared | saved_params_ti | saved_params_hypernet
-saved_params_previews = {"preview_prompt", "preview_negative_prompt", "preview_steps", "preview_sampler_index", "preview_cfg_scale", "preview_seed", "preview_width", "preview_height"}
+saved_params_previews = {
+    "preview_cfg_scale",
+    "preview_height",
+    "preview_negative_prompt",
+    "preview_prompt",
+    "preview_sampler_index",
+    "preview_seed",
+    "preview_steps",
+    "preview_width",
+}
 
 
 def save_settings_to_file(log_directory, all_params):
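
Aside from style, the multi-line rewrite of these sets surfaces a real lint issue: the old one-line saved_params_shared literal listed "gradient_step" twice, and a Python set literal silently drops such duplicates. Newer Ruff releases flag duplicate set members (a flake8-bugbear-style check), which is presumably why this file was touched in a Ruff-upgrade PR. Below is a minimal sketch, using trimmed stand-in sets rather than the full lists from the diff, showing that the deduplication is silent at runtime and that the saved_params_all union is unchanged by the reformat:

# Sketch only (not part of the commit); the sets are trimmed stand-ins for the real ones.
old_style = {"gradient_step", "steps", "gradient_step"}  # duplicate entry goes unnoticed at runtime
assert old_style == {"gradient_step", "steps"}           # set literals silently deduplicate

saved_params_shared = {"gradient_step", "steps"}
saved_params_ti = {"embedding_name"}
saved_params_hypernet = {"hypernetwork_name"}
saved_params_all = saved_params_shared | saved_params_ti | saved_params_hypernet
print(sorted(saved_params_all))
# ['embedding_name', 'gradient_step', 'hypernetwork_name', 'steps']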