Import switched_conv as a submodule

James Betker 2020-10-07 23:10:54 -06:00
parent c352c8bce4
commit c96f5b2686
9 changed files with 17 additions and 13 deletions

.gitmodules

@@ -4,3 +4,6 @@
 [submodule "codes/models/flownet2"]
 	path = codes/models/flownet2
 	url = https://github.com/NVIDIA/flownet2-pytorch.git
+[submodule "codes/switched_conv"]
+	path = codes/switched_conv
+	url = https://github.com/neonbjb/SwitchedConvolutions.git
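
With the submodule registered, a fresh clone of this repository leaves codes/switched_conv as an empty directory until submodules are initialized (git submodule update --init). A startup guard along these lines (an illustration, not part of this commit; it assumes the submodule exposes switched_conv.py at its top level, matching the imports in this diff) can fail fast with a clear message:

# Illustrative guard, not part of this commit.
import os

def assert_submodule_present(repo_root='.'):
    # Probe for one file the new imports depend on; a plain `git clone`
    # without submodule init leaves this directory empty.
    probe = os.path.join(repo_root, 'codes', 'switched_conv', 'switched_conv.py')
    if not os.path.isfile(probe):
        raise RuntimeError(
            'codes/switched_conv is empty -- run `git submodule update --init` '
            'after cloning.')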


@@ -9,8 +9,8 @@ import options.options as option
 import utils.util as util
 from data.util import bgr2ycbcr
 import models.archs.SwitchedResidualGenerator_arch as srg
-from switched_conv_util import save_attention_to_image, save_attention_to_image_rgb
-from switched_conv import compute_attention_specificity
+from switched_conv.switched_conv_util import save_attention_to_image, save_attention_to_image_rgb
+from switched_conv.switched_conv import compute_attention_specificity
 from data import create_dataset, create_dataloader
 from models import create_model
 from tqdm import tqdm
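
The same two-line rewrite repeats in every file below: the flat modules switched_conv and switched_conv_util become submodules of a switched_conv package. If older checkouts still need to run, a small compatibility shim (a sketch, assuming both layouts export the same names; it is not part of this commit) can absorb the difference:

# Illustrative shim: prefer the new package-style submodule layout, fall
# back to the old flat modules if the package is not present.
try:
    from switched_conv.switched_conv import compute_attention_specificity
    from switched_conv.switched_conv_util import save_attention_to_image_rgb
except ImportError:
    from switched_conv import compute_attention_specificity
    from switched_conv_util import save_attention_to_image_rgb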


@@ -109,7 +109,7 @@ class ExtensibleTrainer(BaseModel):
             if opt['dist']:
                 dnet = DistributedDataParallel(anet,
                                                device_ids=[torch.cuda.current_device()],
-                                               find_unused_parameters=True)
+                                               find_unused_parameters=False)
             else:
                 dnet = DataParallel(anet)
             if self.is_train:
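
One behavioral change rides along with the import work: find_unused_parameters flips from True to False. In torch's DistributedDataParallel, True makes the reducer traverse the autograd graph every iteration to detect parameters that received no gradient, which adds overhead; False skips that traversal but raises at backward() time if any parameter genuinely goes unused in a forward pass. A minimal sketch of the construction (illustrative only; anet here is a stand-in model, and a process group is assumed to be initialized already):

import torch
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel

# Assumes torch.distributed.init_process_group(...) has already run.
anet = nn.Linear(16, 16).cuda()  # stand-in for the wrapped network

# find_unused_parameters=False avoids a per-iteration graph walk, so it is
# faster -- but DDP will error during backward() if some parameter really
# does go unused in a forward pass.
dnet = DistributedDataParallel(anet,
                               device_ids=[torch.cuda.current_device()],
                               find_unused_parameters=False)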


@@ -1,8 +1,8 @@
 import models.archs.SwitchedResidualGenerator_arch as srg
 import torch
 import torch.nn as nn
-from switched_conv_util import save_attention_to_image
-from switched_conv import compute_attention_specificity
+from switched_conv.switched_conv_util import save_attention_to_image
+from switched_conv.switched_conv import compute_attention_specificity
 from models.archs.arch_util import ConvGnLelu, ExpansionBlock, MultiConvBlock
 import functools
 import torch.nn.functional as F


@@ -12,8 +12,8 @@ from models.archs import SPSR_util as B
 from models.archs.SwitchedResidualGenerator_arch import ConfigurableSwitchComputer, ReferenceImageBranch, \
     QueryKeyMultiplexer, QueryKeyPyramidMultiplexer
 from models.archs.arch_util import ConvGnLelu, UpconvBlock, MultiConvBlock, ReferenceJoinBlock
-from switched_conv import compute_attention_specificity
-from switched_conv_util import save_attention_to_image_rgb
+from switched_conv.switched_conv import compute_attention_specificity
+from switched_conv.switched_conv_util import save_attention_to_image_rgb
 from .RRDBNet_arch import RRDB


@@ -6,8 +6,8 @@ from models.archs.SPSR_arch import ImageGradientNoPadding
 from torch import nn
 import torch
 import torch.nn.functional as F
-from switched_conv_util import save_attention_to_image_rgb
-from switched_conv import compute_attention_specificity
+from switched_conv.switched_conv_util import save_attention_to_image_rgb
+from switched_conv.switched_conv import compute_attention_specificity
 import os
 import torchvision
 from torch.utils.checkpoint import checkpoint


@@ -1,11 +1,11 @@
 import torch
 from torch import nn
-from switched_conv import BareConvSwitch, compute_attention_specificity, AttentionNorm
+from switched_conv.switched_conv import BareConvSwitch, compute_attention_specificity, AttentionNorm
 import torch.nn.functional as F
 import functools
 from collections import OrderedDict
 from models.archs.arch_util import ConvBnLelu, ConvGnSilu, ExpansionBlock, ExpansionBlock2, ConvGnLelu, MultiConvBlock, SiLU
-from switched_conv_util import save_attention_to_image_rgb
+from switched_conv.switched_conv_util import save_attention_to_image_rgb
 import os
 from models.archs.spinenet_arch import SpineNet

codes/switched_conv Submodule

@@ -0,0 +1 @@
+Subproject commit b69d031766e042d40ecd7dc4986422e77ab278cf
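
The Subproject line pins the submodule to one exact revision, so checking out this commit always fetches b69d031766e042d40ecd7dc4986422e77ab278cf regardless of upstream changes. Because the submodule sits at codes/switched_conv and the scripts above run from codes/, the new package-style imports should resolve without sys.path edits; a quick check (illustrative, assuming scripts are run from codes/, where the submodule directory doubles as a Python package):

# Quick resolution check, not part of this commit.
import importlib.util

try:
    spec = importlib.util.find_spec('switched_conv.switched_conv')
    print('resolves from:', spec.origin if spec else 'not found')
except ModuleNotFoundError:
    print('switched_conv not importable -- was the submodule initialized?')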


@@ -9,8 +9,8 @@ import options.options as option
 import utils.util as util
 from data.util import bgr2ycbcr
 import models.archs.SwitchedResidualGenerator_arch as srg
-from switched_conv_util import save_attention_to_image, save_attention_to_image_rgb
-from switched_conv import compute_attention_specificity
+from switched_conv.switched_conv_util import save_attention_to_image, save_attention_to_image_rgb
+from switched_conv.switched_conv import compute_attention_specificity
 from data import create_dataset, create_dataloader
 from models import create_model
 from tqdm import tqdm