From c4543ce1242860b8c695836efc0eee692bf707a3 Mon Sep 17 00:00:00 2001
From: James Betker
Date: Thu, 15 Oct 2020 17:20:42 -0600
Subject: [PATCH] Set post_transform_block to None where applicable

---
 codes/models/archs/SwitchedResidualGenerator_arch.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/codes/models/archs/SwitchedResidualGenerator_arch.py b/codes/models/archs/SwitchedResidualGenerator_arch.py
index 28376bcf..4885a35a 100644
--- a/codes/models/archs/SwitchedResidualGenerator_arch.py
+++ b/codes/models/archs/SwitchedResidualGenerator_arch.py
@@ -99,8 +99,7 @@ class ConfigurableSwitchComputer(nn.Module):
         # And the switch itself, including learned scalars
         self.switch = BareConvSwitch(initial_temperature=init_temp, attention_norm=AttentionNorm(transform_count, accumulator_size=anorm_multiplier * transform_count) if attention_norm else None)
         self.switch_scale = nn.Parameter(torch.full((1,), float(1)))
-        if post_transform_block is not None:
-            self.post_transform_block = post_transform_block
+        self.post_transform_block = post_transform_block
         if post_switch_conv:
             self.post_switch_conv = ConvBnLelu(base_filters, base_filters, norm=False, bias=True)
             # The post_switch_conv gets a low scale initially. The network can decide to magnify it (or not)
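
Note (not part of the patch): the change assigns post_transform_block unconditionally, so the attribute always exists (possibly as None) and callers can test "is None" instead of hasattr(). Below is a minimal, hedged sketch of that pattern using a simplified stand-in module (OptionalPostBlock is hypothetical, not the real ConfigurableSwitchComputer, and it omits BareConvSwitch and ConvBnLelu).

    # Illustrative sketch only; assumes a plain PyTorch environment.
    import torch
    import torch.nn as nn

    class OptionalPostBlock(nn.Module):
        def __init__(self, channels, post_transform_block=None):
            super().__init__()
            self.body = nn.Conv2d(channels, channels, 3, padding=1)
            # Always assign the optional block, even when it is None.
            # nn.Module registers it as a submodule when a Module is passed,
            # and stores a plain None attribute otherwise, so forward() can
            # simply check "is not None" rather than hasattr().
            self.post_transform_block = post_transform_block

        def forward(self, x):
            x = self.body(x)
            if self.post_transform_block is not None:
                x = self.post_transform_block(x)
            return x

    # Usage: construct the module with and without a post-transform block.
    m_plain = OptionalPostBlock(8)
    m_post = OptionalPostBlock(8, post_transform_block=nn.Conv2d(8, 8, 1))
    y = m_post(torch.randn(1, 8, 16, 16))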