Use custom SiLU
Torch didn't have this before 1.7
parent 05963157c1
commit eda0eadba2
@@ -4,7 +4,7 @@ from switched_conv import BareConvSwitch, compute_attention_specificity, Attenti
 import torch.nn.functional as F
 import functools
 from collections import OrderedDict
-from models.archs.arch_util import ConvBnLelu, ConvGnSilu, ExpansionBlock, ExpansionBlock2, ConvGnLelu, MultiConvBlock
+from models.archs.arch_util import ConvBnLelu, ConvGnSilu, ExpansionBlock, ExpansionBlock2, ConvGnLelu, MultiConvBlock, SiLU
 from switched_conv_util import save_attention_to_image_rgb
 import os
 from torch.utils.checkpoint import checkpoint
@@ -433,7 +433,7 @@ class ResBlock(nn.Module):
             self.downsample = ConvGnSilu(nf, nf_out, kernel_size=1, stride=stride, bias=False, activation=False, norm=True)
         else:
             self.downsample = None
-        self.act = nn.SiLU(inplace=True)
+        self.act = SiLU(inplace=True)
 
     def forward(self, x):
         identity = x
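For context: torch.nn.SiLU (x * sigmoid(x), also known as Swish) was only added in PyTorch 1.7, so older versions need a hand-rolled module. The actual models.archs.arch_util.SiLU implementation is not shown in this diff; a minimal sketch of a drop-in replacement that mirrors the nn.SiLU(inplace=...) API might look like:

import torch
import torch.nn as nn

# Hypothetical sketch of a custom SiLU for PyTorch < 1.7, where nn.SiLU
# does not exist. Not the repo's actual arch_util.SiLU, which this diff
# does not include; it only mirrors the nn.SiLU(inplace=...) interface.
class SiLU(nn.Module):
    def __init__(self, inplace: bool = False):
        super().__init__()
        self.inplace = inplace

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if self.inplace:
            # sigmoid(x) is computed first, then x is overwritten in place.
            return x.mul_(torch.sigmoid(x))
        return x * torch.sigmoid(x)

A module like this behaves the same as nn.SiLU, so call sites such as self.act = SiLU(inplace=True) can be swapped back to nn.SiLU unchanged once the codebase requires torch >= 1.7.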