Fix attention print issue

James Betker 2020-10-08 18:34:00 -06:00
parent 4c85ee51a4
commit afe6af88af


@@ -562,7 +562,7 @@ class SwitchModelBase(nn.Module):
             prefix = "amap_%i_a%i_%%i.png"
             [save_attention_to_image_rgb(output_path, self.attentions[i], self.nf, prefix % (step, i), step,
                                          output_mag=False) for i in range(len(self.attentions))]
-            if self.lr:
+            if self.lr is not None:
                 torchvision.utils.save_image(self.lr[:, :3], os.path.join(experiments_path, "attention_maps",
                                                                           "amap_%i_base_image.png" % (step,)))