From afe6af88aff4b87d6aa099a3397165c246f4a64d Mon Sep 17 00:00:00 2001
From: James Betker
Date: Thu, 8 Oct 2020 18:34:00 -0600
Subject: [PATCH] Fix attention print issue

---
 codes/models/archs/SwitchedResidualGenerator_arch.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/codes/models/archs/SwitchedResidualGenerator_arch.py b/codes/models/archs/SwitchedResidualGenerator_arch.py
index 9c0e43fa..48586de1 100644
--- a/codes/models/archs/SwitchedResidualGenerator_arch.py
+++ b/codes/models/archs/SwitchedResidualGenerator_arch.py
@@ -562,7 +562,7 @@ class SwitchModelBase(nn.Module):
         prefix = "amap_%i_a%i_%%i.png"
         [save_attention_to_image_rgb(output_path, self.attentions[i], self.nf, prefix % (step, i), step, output_mag=False) for i in range(len(self.attentions))]
-        if self.lr:
+        if self.lr is not None:
             torchvision.utils.save_image(self.lr[:, :3], os.path.join(experiments_path, "attention_maps", "amap_%i_base_image.png" % (step,)))
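
For context on the one-line change: in PyTorch, evaluating a tensor with more than one element in a boolean context raises a RuntimeError, so `if self.lr:` crashes whenever `self.lr` actually holds an image tensor; the intended test here is only for presence, which `is not None` expresses correctly. The sketch below is a standalone reproduction of that failure mode, not part of the patch; the tensor shape is a hypothetical stand-in for `self.lr`.

    import torch

    # Hypothetical stand-in for self.lr (a batch of low-res images).
    lr = torch.randn(1, 3, 64, 64)

    # Truthiness of a multi-element tensor is ambiguous and raises:
    # "RuntimeError: Boolean value of Tensor with more than one element is ambiguous"
    try:
        if lr:
            pass
    except RuntimeError as e:
        print(e)

    # The patched check tests only for presence, as intended:
    if lr is not None:
        print("tensor is present, shape:", tuple(lr.shape))

Note that `if lr:` would also silently skip a valid zero-filled single-element tensor (it is falsy), so the explicit `is not None` comparison is the idiomatic presence check even where no exception is raised.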