From eda0eadba21ed838a90d9fbf4ce124bbf1b4cf59 Mon Sep 17 00:00:00 2001
From: James Betker
Date: Wed, 23 Sep 2020 21:05:06 -0600
Subject: [PATCH] Use custom SiLU

Torch didn't have this before 1.7
---
 codes/models/archs/SwitchedResidualGenerator_arch.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/codes/models/archs/SwitchedResidualGenerator_arch.py b/codes/models/archs/SwitchedResidualGenerator_arch.py
index 1abe2b5f..5cb94cc4 100644
--- a/codes/models/archs/SwitchedResidualGenerator_arch.py
+++ b/codes/models/archs/SwitchedResidualGenerator_arch.py
@@ -4,7 +4,7 @@ from switched_conv import BareConvSwitch, compute_attention_specificity, Attenti
 import torch.nn.functional as F
 import functools
 from collections import OrderedDict
-from models.archs.arch_util import ConvBnLelu, ConvGnSilu, ExpansionBlock, ExpansionBlock2, ConvGnLelu, MultiConvBlock
+from models.archs.arch_util import ConvBnLelu, ConvGnSilu, ExpansionBlock, ExpansionBlock2, ConvGnLelu, MultiConvBlock, SiLU
 from switched_conv_util import save_attention_to_image_rgb
 import os
 from torch.utils.checkpoint import checkpoint
@@ -433,7 +433,7 @@ class ResBlock(nn.Module):
             self.downsample = ConvGnSilu(nf, nf_out, kernel_size=1, stride=stride, bias=False, activation=False, norm=True)
         else:
             self.downsample = None
-        self.act = nn.SiLU(inplace=True)
+        self.act = SiLU(inplace=True)

     def forward(self, x):
         identity = x
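
For reference, a minimal sketch of what the custom SiLU imported from models/archs/arch_util.py might look like. The actual definition is not included in this patch, and the treatment of the inplace flag below is an assumption:

import torch
import torch.nn as nn

class SiLU(nn.Module):
    """SiLU (swish) activation, x * sigmoid(x), provided locally because
    torch.nn.SiLU only exists in PyTorch >= 1.7."""
    def __init__(self, inplace=False):
        super().__init__()
        # Assumed behavior: the flag is kept only for API parity with
        # torch.nn.SiLU; forward() always returns a new tensor.
        self.inplace = inplace

    def forward(self, x):
        return x * torch.sigmoid(x)

# Example usage matching the patched ResBlock:
#   act = SiLU(inplace=True)
#   y = act(torch.randn(4, 64, 32, 32))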