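# Entry point script for the Lora extension: it saves the original forward
# methods of torch.nn.Linear and torch.nn.Conv2d, monkey-patches them so Lora
# weights are applied during the forward pass, and registers the Lora UI pages,
# callbacks and options with the web UI.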
import torch

import lora
import extra_networks_lora
import ui_extra_networks_lora
from modules import script_callbacks, ui_extra_networks, extra_networks, shared
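

# Registered with script_callbacks.on_script_unloaded below; restores the
# original forward methods when this script is unloaded.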
def unload():
    torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
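

# Registered with script_callbacks.on_before_ui below; adds the Lora page to the
# extra networks UI and registers the Lora extra-network handler.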
def before_ui():
    ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())
    extra_networks.register_extra_network(extra_networks_lora.ExtraNetworkLora())
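

# Keep references to the original forward methods, but only on first load, so a
# script reload does not overwrite them with the already-patched versions.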
if not hasattr(torch.nn, 'Linear_forward_before_lora'):
    torch.nn.Linear_forward_before_lora = torch.nn.Linear.forward

if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
    torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward
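
# Swap in the Lora-aware forward implementations.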
torch.nn.Linear.forward = lora.lora_Linear_forward
torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
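
# Hook into the web UI lifecycle: tag the loaded model's modules with Lora names,
# undo the patches on script unload, and register the UI pieces before the UI is built.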
script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
script_callbacks.on_script_unloaded(unload)
script_callbacks.on_before_ui(before_ui)
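

# Add a Lora setting to the "Extra Networks" options section
# (read via shared.opts.lora_apply_to_outputs).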
shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
    "lora_apply_to_outputs": shared.OptionInfo(False, "Apply Lora to outputs rather than inputs when possible (experimental)"),
}))