From 43c4f92123dcd045a31e65f51b6d9238e7705b21 Mon Sep 17 00:00:00 2001
From: James Betker
Date: Wed, 21 Oct 2020 22:37:23 -0600
Subject: [PATCH] Collapse progressive zoom candidates into the batch dimension

This contributes a significant speedup to training this type of network
since losses can operate on the entire prediction spectrum at once.
---
 codes/models/steps/progressive_zoom.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/codes/models/steps/progressive_zoom.py b/codes/models/steps/progressive_zoom.py
index c516e0bb..3da24320 100644
--- a/codes/models/steps/progressive_zoom.py
+++ b/codes/models/steps/progressive_zoom.py
@@ -103,8 +103,10 @@ class ProgressiveGeneratorInjector(Injector):
                 self.produce_progressive_visual_debugs(chain_input, chain_output, debug_index)
                 debug_index += 1
         results[self.hq_output_key] = results_hq
+
+        # Results are concatenated into the batch dimension, to allow normal losses to be used against the output.
         for k, v in results.items():
-            results[k] = torch.stack(v, dim=1)
+            results[k] = torch.cat(v, dim=0)
         return results
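
Note (not part of the patch): the sketch below illustrates why concatenating the per-zoom-level outputs along the batch dimension lets a single, ordinary loss call cover every prediction at once, whereas stacking into a new dimension would require a per-level loop. Tensor shapes and the L1 loss are hypothetical stand-ins, not taken from the repository.

import torch
import torch.nn.functional as F

# Hypothetical shapes: batch of 4, three zoom levels, 3x32x32 images.
b, c, h, w = 4, 3, 32, 32
levels = [torch.randn(b, c, h, w) for _ in range(3)]   # one output per zoom level
targets = [torch.randn(b, c, h, w) for _ in range(3)]

# Old behavior: stack into a new dimension -> shape (b, 3, c, h, w),
# which standard image losses cannot consume without looping over levels.
stacked = torch.stack(levels, dim=1)

# New behavior: collapse the levels into the batch dimension -> shape (3*b, c, h, w),
# so one loss evaluation operates on the entire prediction spectrum.
flat_pred = torch.cat(levels, dim=0)
flat_tgt = torch.cat(targets, dim=0)
loss = F.l1_loss(flat_pred, flat_tgt)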