diff --git a/modules/api/api.py b/modules/api/api.py
index 2a43085f..112000b8 100644
--- a/modules/api/api.py
+++ b/modules/api/api.py
@@ -231,6 +231,10 @@ class Api:
         return options
 
     def set_config(self, req: OptionsModel):
+        # Currently req contains every options field even if the client sends a partial dict such as
+        # { "send_seed": false }, so applying it would overwrite all other options with default values.
+        raise RuntimeError('Setting options via API is not supported')
+
         reqDict = vars(req)
         for o in reqDict:
             setattr(shared.opts, o, reqDict[o])
diff --git a/modules/api/models.py b/modules/api/models.py
index 2ae75f43..a44c5ddd 100644
--- a/modules/api/models.py
+++ b/modules/api/models.py
@@ -1,6 +1,6 @@
 import inspect
 from pydantic import BaseModel, Field, create_model
-from typing import Any, Optional, Union
+from typing import Any, Optional
 from typing_extensions import Literal
 from inflection import underscore
 from modules.processing import StableDiffusionProcessingTxt2Img, StableDiffusionProcessingImg2Img
@@ -185,22 +185,22 @@ _options = vars(parser)['_option_string_actions']
 for key in _options:
     if(_options[key].dest != 'help'):
         flag = _options[key]
-        _type = str
-        if(_options[key].default != None): _type = type(_options[key].default)
+        _type = str
+        if _options[key].default is not None: _type = type(_options[key].default)
         flags.update({flag.dest: (_type,Field(default=flag.default, description=flag.help))})
 
 FlagsModel = create_model("Flags", **flags)
 
 class SamplerItem(BaseModel):
     name: str = Field(title="Name")
-    aliases: list[str] = Field(title="Aliases") 
+    aliases: list[str] = Field(title="Aliases")
     options: dict[str, str] = Field(title="Options")
 
 class UpscalerItem(BaseModel):
     name: str = Field(title="Name")
-    model_name: str | None = Field(title="Model Name")
-    model_path: str | None = Field(title="Path")
-    model_url: str | None = Field(title="URL")
+    model_name: Optional[str] = Field(title="Model Name")
+    model_path: Optional[str] = Field(title="Path")
+    model_url: Optional[str] = Field(title="URL")
 
 class SDModelItem(BaseModel):
     title: str = Field(title="Title")
@@ -211,21 +211,21 @@ class SDModelItem(BaseModel):
 
 class HypernetworkItem(BaseModel):
     name: str = Field(title="Name")
-    path: str | None = Field(title="Path")
+    path: Optional[str] = Field(title="Path")
 
 class FaceRestorerItem(BaseModel):
     name: str = Field(title="Name")
-    cmd_dir: str | None = Field(title="Path")
+    cmd_dir: Optional[str] = Field(title="Path")
 
 class RealesrganItem(BaseModel):
     name: str = Field(title="Name")
-    path: str | None = Field(title="Path")
-    scale: int | None = Field(title="Scale")
+    path: Optional[str] = Field(title="Path")
+    scale: Optional[int] = Field(title="Scale")
 
 class PromptStyleItem(BaseModel):
     name: str = Field(title="Name")
-    prompt: str | None = Field(title="Prompt")
-    negative_prompt: str | None = Field(title="Negative Prompt")
+    prompt: Optional[str] = Field(title="Prompt")
+    negative_prompt: Optional[str] = Field(title="Negative Prompt")
 
 class ArtistItem(BaseModel):
     name: str = Field(title="Name")
diff --git a/modules/extensions.py b/modules/extensions.py
index 897af96e..8e0977fd 100644
--- a/modules/extensions.py
+++ b/modules/extensions.py
@@ -34,8 +34,11 @@ class Extension:
         if repo is None or repo.bare:
             self.remote = None
         else:
-            self.remote = next(repo.remote().urls, None)
-            self.status = 'unknown'
+            try:
+                self.remote = next(repo.remote().urls, None)
+                self.status = 'unknown'
+            except Exception:
+                self.remote = None
 
     def list_files(self, subdir, extension):
         from modules import scripts
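The `set_config` guard above exists because a pydantic model materializes every declared field when parsing a request. A minimal sketch of the failure mode, using a hypothetical two-field stand-in for the real `OptionsModel` (pydantic v1 semantics):

```python
from pydantic import BaseModel

# Hypothetical two-field stand-in for the real OptionsModel.
class OptionsModel(BaseModel):
    send_seed: bool = True
    grid_save: bool = True

# The client only sends one field...
req = OptionsModel(**{"send_seed": False})

# ...but vars(req) yields every field, defaults included, so the old loop
# would have written grid_save=True over whatever the user had configured.
print(vars(req))            # {'send_seed': False, 'grid_save': True}
print(req.__fields_set__)   # {'send_seed'} -- only the fields actually sent
```

A later fix could iterate over `req.__fields_set__` instead of `vars(req)` so that only fields the caller actually sent get applied.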
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py
index 6e1a10cf..7f182712 100644
--- a/modules/hypernetworks/hypernetwork.py
+++ b/modules/hypernetworks/hypernetwork.py
@@ -22,6 +22,8 @@ from collections import defaultdict, deque
 from statistics import stdev, mean
 
+optimizer_dict = {optim_name : cls_obj for optim_name, cls_obj in inspect.getmembers(torch.optim, inspect.isclass) if optim_name != "Optimizer"}
+
 
 class HypernetworkModule(torch.nn.Module):
     multiplier = 1.0
     activation_dict = {
@@ -142,6 +144,8 @@ class Hypernetwork:
         self.use_dropout = use_dropout
         self.activate_output = activate_output
         self.last_layer_dropout = kwargs['last_layer_dropout'] if 'last_layer_dropout' in kwargs else True
+        self.optimizer_name = None
+        self.optimizer_state_dict = None
 
         for size in enable_sizes or []:
             self.layers[size] = (
@@ -163,6 +167,7 @@ class Hypernetwork:
 
     def save(self, filename):
         state_dict = {}
+        optimizer_saved_dict = {}
 
         for k, v in self.layers.items():
             state_dict[k] = (v[0].state_dict(), v[1].state_dict())
@@ -178,8 +183,15 @@ class Hypernetwork:
         state_dict['sd_checkpoint_name'] = self.sd_checkpoint_name
         state_dict['activate_output'] = self.activate_output
         state_dict['last_layer_dropout'] = self.last_layer_dropout
-
+
+        if self.optimizer_name is not None:
+            optimizer_saved_dict['optimizer_name'] = self.optimizer_name
+
         torch.save(state_dict, filename)
+        if shared.opts.save_optimizer_state and self.optimizer_state_dict:
+            optimizer_saved_dict['hash'] = sd_models.model_hash(filename)
+            optimizer_saved_dict['optimizer_state_dict'] = self.optimizer_state_dict
+            torch.save(optimizer_saved_dict, filename + '.optim')
 
     def load(self, filename):
         self.filename = filename
@@ -202,6 +214,18 @@ class Hypernetwork:
         print(f"Activate last layer is set to {self.activate_output}")
         self.last_layer_dropout = state_dict.get('last_layer_dropout', False)
 
+        optimizer_saved_dict = torch.load(self.filename + '.optim', map_location='cpu') if os.path.exists(self.filename + '.optim') else {}
+        self.optimizer_name = optimizer_saved_dict.get('optimizer_name', 'AdamW')
+        print(f"Optimizer name is {self.optimizer_name}")
+        if sd_models.model_hash(filename) == optimizer_saved_dict.get('hash', None):
+            self.optimizer_state_dict = optimizer_saved_dict.get('optimizer_state_dict', None)
+        else:
+            self.optimizer_state_dict = None
+        if self.optimizer_state_dict:
+            print("Loaded existing optimizer from checkpoint")
+        else:
+            print("No saved optimizer exists in checkpoint")
+
         for size, sd in state_dict.items():
             if type(size) == int:
                 self.layers[size] = (
@@ -219,11 +243,11 @@ class Hypernetwork:
 
 
 def list_hypernetworks(path):
     res = {}
-    for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
+    for filename in sorted(glob.iglob(os.path.join(path, '**/*.pt'), recursive=True)):
        name = os.path.splitext(os.path.basename(filename))[0]
         # Prevent a hypothetical "None.pt" from being listed.
         if name != "None":
-            res[name] = filename
+            res[name + f"({sd_models.model_hash(filename)})"] = filename
     return res
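The `.optim` sidecar written by `save()` records a hash of the companion `.pt` file, and `load()` only trusts the optimizer state when the hashes still match, so a stale sidecar is silently discarded after the weights change. A minimal standalone sketch of that guard, with a simplified `model_hash` standing in for `sd_models.model_hash` (which hashes only a slice of the file):

```python
import hashlib
import os
import torch

def model_hash(filename):
    # Simplified stand-in for sd_models.model_hash.
    with open(filename, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest()[:8]

def load_optimizer_state(weights_path):
    optim_path = weights_path + '.optim'
    if not os.path.exists(optim_path):
        return None
    saved = torch.load(optim_path, map_location='cpu')
    # Discard the sidecar if the weights file changed since it was written.
    if saved.get('hash') != model_hash(weights_path):
        return None
    return saved.get('optimizer_state_dict')
```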
@@ -358,6 +382,7 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
     shared.state.textinfo = "Initializing hypernetwork training..."
     shared.state.job_count = steps
 
+    hypernetwork_name = hypernetwork_name.rsplit('(', 1)[0]
     filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork_name}.pt')
 
     log_directory = os.path.join(log_directory, datetime.datetime.now().strftime("%Y-%m-%d"), hypernetwork_name)
@@ -404,8 +429,22 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
     weights = hypernetwork.weights()
     for weight in weights:
         weight.requires_grad = True
-    # if optimizer == "AdamW": or else Adam / AdamW / SGD, etc...
-    optimizer = torch.optim.AdamW(weights, lr=scheduler.learn_rate)
+
+    # Use the optimizer type recorded in the saved hypernetwork; this could also be exposed as a UI option.
+    if hypernetwork.optimizer_name in optimizer_dict:
+        optimizer = optimizer_dict[hypernetwork.optimizer_name](params=weights, lr=scheduler.learn_rate)
+        optimizer_name = hypernetwork.optimizer_name
+    else:
+        print(f"Optimizer type {hypernetwork.optimizer_name} is not defined!")
+        optimizer = torch.optim.AdamW(params=weights, lr=scheduler.learn_rate)
+        optimizer_name = 'AdamW'
+
+    if hypernetwork.optimizer_state_dict:  # This must change if the optimizer type can differ from the saved one.
+        try:
+            optimizer.load_state_dict(hypernetwork.optimizer_state_dict)
+        except RuntimeError as e:
+            print("Cannot resume from saved optimizer!")
+            print(e)
 
     steps_without_grad = 0
 
@@ -467,7 +506,11 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, data_root, log
                 # Before saving, change name to match current checkpoint.
                 hypernetwork_name_every = f'{hypernetwork_name}-{steps_done}'
                 last_saved_file = os.path.join(hypernetwork_dir, f'{hypernetwork_name_every}.pt')
+                hypernetwork.optimizer_name = optimizer_name
+                if shared.opts.save_optimizer_state:
+                    hypernetwork.optimizer_state_dict = optimizer.state_dict()
                 save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, last_saved_file)
+                hypernetwork.optimizer_state_dict = None  # dereference it after saving, to save memory.
 
             textual_inversion.write_loss(log_directory, "hypernetwork_loss.csv", hypernetwork.step, len(ds), {
                 "loss": f"{previous_mean_loss:.7f}",
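The name-based lookup above works because the module-level `optimizer_dict` introduced earlier enumerates every optimizer class that `torch.optim` exports. A standalone sketch of the same construction and fallback (the `requested` value is a made-up example):

```python
import inspect
import torch

# Map every optimizer class exported by torch.optim by name, skipping the abstract base class.
optimizer_dict = {name: cls for name, cls in inspect.getmembers(torch.optim, inspect.isclass)
                  if name != "Optimizer"}

params = [torch.nn.Parameter(torch.zeros(3))]
requested = "SGD"  # e.g. the name stored in a saved hypernetwork

# Fall back to AdamW when the recorded name is unknown, as the training code does.
cls = optimizer_dict.get(requested, torch.optim.AdamW)
optimizer = cls(params=params, lr=1e-3)
print(type(optimizer).__name__)  # SGD
```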
@@ -530,8 +573,12 @@ Last saved image: {html.escape(last_saved_image)}
     report_statistics(loss_dict)
 
     filename = os.path.join(shared.cmd_opts.hypernetwork_dir, f'{hypernetwork_name}.pt')
+    hypernetwork.optimizer_name = optimizer_name
+    if shared.opts.save_optimizer_state:
+        hypernetwork.optimizer_state_dict = optimizer.state_dict()
     save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename)
-
+    del optimizer
+    hypernetwork.optimizer_state_dict = None  # dereference it after saving, to save memory.
     return hypernetwork, filename
 
 def save_hypernetwork(hypernetwork, checkpoint, hypernetwork_name, filename):
diff --git a/modules/hypernetworks/ui.py b/modules/hypernetworks/ui.py
index aad09ffc..c2d4b51c 100644
--- a/modules/hypernetworks/ui.py
+++ b/modules/hypernetworks/ui.py
@@ -9,7 +9,7 @@ from modules import devices, sd_hijack, shared
 from modules.hypernetworks import hypernetwork
 
 not_available = ["hardswish", "multiheadattention"]
-keys = ["linear"] + list(x for x in hypernetwork.HypernetworkModule.activation_dict.keys() if x not in not_available)
+keys = list(x for x in hypernetwork.HypernetworkModule.activation_dict.keys() if x not in not_available)
 
 def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False):
     # Remove illegal characters from name.
diff --git a/modules/shared.py b/modules/shared.py
index a9e28b9c..70b998ff 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -86,6 +86,10 @@ parser.add_argument("--nowebui", action='store_true', help="use api=True to laun
 parser.add_argument("--ui-debug-mode", action='store_true', help="Don't load model to quickly launch UI")
 parser.add_argument("--device-id", type=str, help="Select the default CUDA device to use (export CUDA_VISIBLE_DEVICES=0,1,etc might be needed before)", default=None)
 parser.add_argument("--administrator", action='store_true', help="Administrator rights", default=False)
+parser.add_argument("--cors-allow-origins", type=str, help="Allowed CORS origins", default=None)
+parser.add_argument("--tls-keyfile", type=str, help="Partially enables TLS, requires --tls-certfile to fully function", default=None)
+parser.add_argument("--tls-certfile", type=str, help="Partially enables TLS, requires --tls-keyfile to fully function", default=None)
+parser.add_argument("--server-name", type=str, help="Sets hostname of server", default=None)
 
 cmd_opts = parser.parse_args()
 restricted_opts = {
@@ -147,9 +151,9 @@ class State:
         self.interrupted = True
 
     def nextjob(self):
-        if opts.show_progress_every_n_steps == -1: 
+        if opts.show_progress_every_n_steps == -1:
             self.do_set_current_image()
-        
+
         self.job_no += 1
         self.sampling_step = 0
         self.current_image_sampling_step = 0
@@ -198,7 +202,7 @@ class State:
             return
         if self.current_latent is None:
             return
-        
+
         if opts.show_progress_grid:
             self.current_image = sd_samplers.samples_to_image_grid(self.current_latent)
         else:
@@ -317,6 +321,7 @@ options_templates.update(options_section(('system', "System"), {
 
 options_templates.update(options_section(('training', "Training"), {
     "unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
+    "save_optimizer_state": OptionInfo(False, "Save optimizer state as a separate *.optim file. Training can be resumed with the matching hypernetwork and optim file."),
     "dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
     "dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
     "training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),
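The `save_optimizer_state` option above is what feeds `hypernetwork.optimizer_state_dict` before each save. What resuming buys you is the optimizer's internal state (e.g. AdamW's moment estimates), which is otherwise lost between runs. A self-contained sketch of the round trip in plain torch, not webui code; the `hn.pt.optim` filename is just an illustration:

```python
import torch

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

# A training step populates the optimizer's internal state.
model(torch.randn(2, 4)).sum().backward()
optimizer.step()

# Save the state next to the weights, as the new *.optim sidecar does.
torch.save({'optimizer_name': 'AdamW', 'optimizer_state_dict': optimizer.state_dict()},
           'hn.pt.optim')

# On resume: recreate the same optimizer type, then restore its state.
resumed = torch.optim.AdamW(model.parameters(), lr=1e-3)
resumed.load_state_dict(torch.load('hn.pt.optim', map_location='cpu')['optimizer_state_dict'])
```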
@@ -406,7 +411,8 @@ class Options:
         if key in self.data or key in self.data_labels:
             assert not cmd_opts.freeze_settings, "changing settings is disabled"
 
-            comp_args = opts.data_labels[key].component_args
+            info = opts.data_labels.get(key, None)
+            comp_args = info.component_args if info else None
             if isinstance(comp_args, dict) and comp_args.get('visible', True) is False:
                 raise RuntimeError(f"not possible to set {key} because it is restricted")
diff --git a/modules/ui.py b/modules/ui.py
index 4c2829af..76ca9b07 100644
--- a/modules/ui.py
+++ b/modules/ui.py
@@ -1446,17 +1446,19 @@ def create_ui(wrap_gradio_gpu_call):
                 continue
 
             oldval = opts.data.get(key, None)
-
-            setattr(opts, key, value)
-
+            try:
+                setattr(opts, key, value)
+            except RuntimeError:
+                continue
             if oldval != value:
                 if opts.data_labels[key].onchange is not None:
                     opts.data_labels[key].onchange()
 
                 changed += 1
-
-        opts.save(shared.config_filename)
-
+        try:
+            opts.save(shared.config_filename)
+        except RuntimeError:
+            return opts.dumpjson(), f'{changed} settings changed without save.'
         return opts.dumpjson(), f'{changed} settings changed.'
 
     def run_settings_single(value, key):
diff --git a/modules/ui_extensions.py b/modules/ui_extensions.py
index a81de9a7..8e0d41d5 100644
--- a/modules/ui_extensions.py
+++ b/modules/ui_extensions.py
@@ -188,7 +188,7 @@ def refresh_available_extensions_from_data():
         code += f"""
             <tr>
-                <td><a href="{html.escape(url)}">{html.escape(name)}</a></td>
+                <td><a href="{html.escape(url)}" target="_blank">{html.escape(name)}</a></td>
                 <td>{html.escape(description)}</td>
                 <td>{install_code}</td>
             </tr>
         """
diff --git a/modules/upscaler.py b/modules/upscaler.py
index 83fde7ca..c4e6e6bd 100644
--- a/modules/upscaler.py
+++ b/modules/upscaler.py
@@ -57,10 +57,18 @@ class Upscaler:
         self.scale = scale
         dest_w = img.width * scale
         dest_h = img.height * scale
+
         for i in range(3):
-            if img.width > dest_w and img.height > dest_h:
-                break
+            shape = (img.width, img.height)
+
             img = self.do_upscale(img, selected_model)
+
+            if shape == (img.width, img.height):
+                break
+
+            if img.width >= dest_w and img.height >= dest_h:
+                break
+
         if img.width != dest_w or img.height != dest_h:
             img = img.resize((int(dest_w), int(dest_h)), resample=LANCZOS)
 
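The reworked loop still runs the upscaler at most three times, but now bails out early in two cases: the model made no progress (guarding against upscalers that return the image unchanged), or the target size has been reached. A minimal sketch of that control flow on bare (width, height) tuples, with a stand-in `do_upscale`:

```python
def upscale(size, scale, do_upscale, max_passes=3):
    """size is a (width, height) tuple; do_upscale maps one size to a larger one."""
    dest_w, dest_h = size[0] * scale, size[1] * scale

    for _ in range(max_passes):
        before = size
        size = do_upscale(size)

        if size == before:  # no progress: a non-upscaling model would otherwise waste all passes
            break
        if size[0] >= dest_w and size[1] >= dest_h:  # target reached or exceeded
            break

    # The real code then resizes to exactly (dest_w, dest_h) with LANCZOS if needed.
    return size

print(upscale((64, 64), 4, lambda s: (s[0] * 2, s[1] * 2)))  # (256, 256) after two passes
```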
diff --git a/webui.py b/webui.py
index 81df09dd..a5a520f0 100644
--- a/webui.py
+++ b/webui.py
@@ -5,6 +5,7 @@ import importlib
 import signal
 import threading
 from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
 from fastapi.middleware.gzip import GZipMiddleware
 
 from modules.paths import script_path
@@ -34,7 +35,7 @@ from modules.shared import cmd_opts
 import modules.hypernetworks.hypernetwork
 
 queue_lock = threading.Lock()
-
+server_name = "0.0.0.0" if cmd_opts.listen else cmd_opts.server_name
 
 def wrap_queued_call(func):
     def f(*args, **kwargs):
@@ -85,6 +86,20 @@ def initialize():
     shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetworks.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))
     shared.opts.onchange("sd_hypernetwork_strength", modules.hypernetworks.hypernetwork.apply_strength)
 
+    if cmd_opts.tls_keyfile is not None and cmd_opts.tls_certfile is not None:
+
+        try:
+            if not os.path.exists(cmd_opts.tls_keyfile):
+                print("Invalid path to TLS keyfile given")
+            if not os.path.exists(cmd_opts.tls_certfile):
+                print(f"Invalid path to TLS certfile: '{cmd_opts.tls_certfile}'")
+        except TypeError:
+            cmd_opts.tls_keyfile = cmd_opts.tls_certfile = None
+            print("TLS setup invalid, running webui without TLS")
+        else:
+            print("Running with TLS")
+
+
     # make the program just exit at ctrl+c without waiting for anything
     def sigint_handler(sig, frame):
         print(f'Interrupted with signal {sig} in {frame}')
@@ -93,6 +108,11 @@ def initialize():
     signal.signal(signal.SIGINT, sigint_handler)
 
 
+def setup_cors(app):
+    if cmd_opts.cors_allow_origins:
+        app.add_middleware(CORSMiddleware, allow_origins=cmd_opts.cors_allow_origins.split(','), allow_methods=['*'])
+
+
 def create_api(app):
     from modules.api.api import Api
     api = Api(app, queue_lock)
@@ -114,6 +134,7 @@ def api_only():
     initialize()
 
     app = FastAPI()
+    setup_cors(app)
     app.add_middleware(GZipMiddleware, minimum_size=1000)
     api = create_api(app)
 
@@ -131,8 +152,10 @@ def webui():
 
         app, local_url, share_url = demo.launch(
             share=cmd_opts.share,
-            server_name="0.0.0.0" if cmd_opts.listen else None,
+            server_name=server_name,
             server_port=cmd_opts.port,
+            ssl_keyfile=cmd_opts.tls_keyfile,
+            ssl_certfile=cmd_opts.tls_certfile,
             debug=cmd_opts.gradio_debug,
             auth=[tuple(cred.split(':')) for cred in cmd_opts.gradio_auth.strip('"').split(',')] if cmd_opts.gradio_auth else None,
             inbrowser=cmd_opts.autolaunch,
@@ -147,6 +170,8 @@ def webui():
         # runnnig its code. We disable this here. Suggested by RyotaK.
         app.user_middleware = [x for x in app.user_middleware if x.cls.__name__ != 'CORSMiddleware']
 
+        setup_cors(app)
+
         app.add_middleware(GZipMiddleware, minimum_size=1000)
 
         if launch_api:
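`setup_cors` gates the middleware on `--cors-allow-origins`, which arrives as a single comma-separated string. A standalone sketch of the same wiring (real FastAPI/Starlette API; the origins value is a made-up example):

```python
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

# e.g. what --cors-allow-origins="http://localhost:3000,https://example.com" would deliver
cors_allow_origins = "http://localhost:3000,https://example.com"

app = FastAPI()
if cors_allow_origins:
    # Split the comma-separated flag into the list CORSMiddleware expects.
    app.add_middleware(CORSMiddleware,
                       allow_origins=cors_allow_origins.split(','),
                       allow_methods=['*'])
```

Passing `allow_methods=['*']` matters here: the middleware's default only allows GET, so cross-origin POSTs to the API would still be blocked without it.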