Fix attention print issue
parent 4c85ee51a4
commit afe6af88af
@@ -562,7 +562,7 @@ class SwitchModelBase(nn.Module):
         prefix = "amap_%i_a%i_%%i.png"
         [save_attention_to_image_rgb(output_path, self.attentions[i], self.nf, prefix % (step, i), step,
                                      output_mag=False) for i in range(len(self.attentions))]
-        if self.lr:
+        if self.lr is not None:
             torchvision.utils.save_image(self.lr[:, :3], os.path.join(experiments_path, "attention_maps",
                                                                       "amap_%i_base_image.png" % (step,)))
 
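The substance of the fix is the guard on self.lr. If self.lr holds a multi-element tensor, Python's truthiness test invokes Tensor.__bool__, which raises "RuntimeError: Boolean value of Tensor with more than one element is ambiguous"; comparing against None tests only for presence and never calls __bool__. A minimal sketch of the two behaviors (the variable name lr is illustrative, not taken from the repository):

    import torch

    lr = torch.zeros(1, 3, 64, 64)  # stand-in for self.lr: a batch of low-res images

    # Truthiness on a multi-element tensor raises at runtime:
    try:
        if lr:
            pass
    except RuntimeError as e:
        print(e)  # "Boolean value of Tensor with more than one element is ambiguous"

    # The identity check only asks whether a tensor was assigned at all:
    if lr is not None:
        print("safe to save the base image")

This is why the attention-map dump crashed whenever a low-res input was present: the old guard was meant as a None check but was evaluated as a tensor truth value.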