spsr7 adjustments

James Betker 2020-09-29 17:19:59 -06:00
parent dc8f3b24de
commit 9fee1cec71

@@ -528,16 +528,16 @@ class Spsr7(nn.Module):
transform_count=self.transformation_counts, init_temp=init_temperature,
add_scalable_noise_to_transforms=False, feed_transforms_into_multiplexer=True)
self.sw1_out = nn.Sequential(ConvGnLelu(nf, nf, kernel_size=3, norm=False, activation=True),
- ConvGnLelu(nf, 3, kernel_size=1, norm=False, activation=False, bias=True))
+ ConvGnLelu(nf, 3, kernel_size=1, norm=False, activation=False))
self.sw2 = ConfigurableSwitchComputer(transformation_filters, multiplx_fn,
pre_transform_block=None, transform_block=transform_fn,
attention_norm=True,
transform_count=self.transformation_counts, init_temp=init_temperature,
add_scalable_noise_to_transforms=False, feed_transforms_into_multiplexer=True)
- self.feature_lr_conv = ConvGnLelu(nf, nf, kernel_size=3, norm=True, activation=False)
+ self.feature_lr_conv = ConvGnLelu(nf, nf, kernel_size=3, norm=True, activation=False, bias=False)
self.feature_lr_conv2 = ConvGnLelu(nf, nf, kernel_size=3, norm=False, activation=False, bias=False)
self.sw2_out = nn.Sequential(ConvGnLelu(nf, nf, kernel_size=3, norm=False, activation=True),
- ConvGnLelu(nf, 3, kernel_size=1, norm=False, activation=False, bias=True))
+ ConvGnLelu(nf, 3, kernel_size=1, norm=False, activation=False))
# Grad branch. Note - groupnorm on this branch is REALLY bad. Avoid it like the plague.
self.get_g_nopadding = ImageGradientNoPadding()
@@ -587,6 +587,7 @@ class Spsr7(nn.Module):
x2 = x1
x2, a2 = self.sw2(x2, True, identity=x1, att_in=(x2, ref_embedding))
x2 = self.feature_lr_conv2(self.feature_lr_conv(x2))
s2out = self.sw2_out(x2)
x_grad = self.grad_conv(x_grad)
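
For readers outside the repo, a minimal, hypothetical sketch of a ConvGnLelu-style block (Conv2d followed by optional GroupNorm and optional LeakyReLU) may help read the norm / activation / bias flags toggled above. ConvGnLeluSketch, nf = 64, and the 8-group default below are illustrative assumptions, not the repository's actual ConvGnLelu implementation.

# Hypothetical sketch only -- not the repository's ConvGnLelu implementation.
import torch
import torch.nn as nn

class ConvGnLeluSketch(nn.Module):
    """Conv2d followed by optional GroupNorm and optional LeakyReLU."""
    def __init__(self, in_ch, out_ch, kernel_size=3, norm=True, activation=True,
                 bias=True, num_groups=8):
        super().__init__()
        # A conv that feeds straight into a GroupNorm often drops its bias
        # (bias=False), since the norm's affine shift makes it redundant.
        self.conv = nn.Conv2d(in_ch, out_ch, kernel_size,
                              padding=kernel_size // 2, bias=bias)
        self.norm = nn.GroupNorm(num_groups, out_ch) if norm else None
        self.act = nn.LeakyReLU(0.2, inplace=True) if activation else None

    def forward(self, x):
        x = self.conv(x)
        if self.norm is not None:
            x = self.norm(x)
        if self.act is not None:
            x = self.act(x)
        return x

# Example shaped like the sw1_out head in the diff (nf assumed to be 64):
nf = 64
sw1_out = nn.Sequential(ConvGnLeluSketch(nf, nf, kernel_size=3, norm=False, activation=True),
                        ConvGnLeluSketch(nf, 3, kernel_size=1, norm=False, activation=False))
print(sw1_out(torch.randn(1, nf, 32, 32)).shape)  # torch.Size([1, 3, 32, 32])

The bias=False added to feature_lr_conv presumably follows that same pattern, since that layer is built with norm=True; the two output heads likewise no longer force bias=True on their final 1x1 projection down to 3 channels.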