Turns out, can't do that

James Betker 2020-08-25 17:18:52 -06:00
parent 935a735327
commit f85f1e21db


@@ -291,9 +291,6 @@ class SRGANModel(BaseModel):
                 self.netD_grad = DistributedDataParallel(self.netD_grad,
                                                          device_ids=[torch.cuda.current_device()],
                                                          find_unused_parameters=True)
-                self.get_grad_nopadding = DistributedDataParallel(self.get_grad_nopadding,
-                                                                  device_ids=[torch.cuda.current_device()],
-                                                                  find_unused_parameters=True)
             else:
                 self.netD = DataParallel(self.netD)
                 if self.spsr_enabled:
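
Why the wrap had to be reverted is not stated beyond the commit message, but a likely reason (an assumption, not confirmed by the source) is that DistributedDataParallel refuses to wrap a module that has no parameters requiring gradients, and a fixed gradient-extraction module like get_grad_nopadding typically keeps its kernels as buffers. A minimal sketch reproducing that failure on CPU with a single-process gloo group; GradNoPadding is a hypothetical stand-in, not the class used in this repo:

import os
import torch
import torch.distributed as dist
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.parallel import DistributedDataParallel

class GradNoPadding(nn.Module):
    # Stand-in for get_grad_nopadding: the kernel is registered as a buffer,
    # so the module exposes no parameters that require gradients.
    def __init__(self):
        super().__init__()
        kernel = torch.tensor([[-1., 0., 1.]]).view(1, 1, 1, 3)
        self.register_buffer('kernel_x', kernel)

    def forward(self, x):
        return F.conv2d(x, self.kernel_x, padding=(0, 1))

if __name__ == '__main__':
    os.environ.setdefault('MASTER_ADDR', '127.0.0.1')
    os.environ.setdefault('MASTER_PORT', '29500')
    dist.init_process_group('gloo', rank=0, world_size=1)
    try:
        # Fails because GradNoPadding has no parameters requiring gradients
        # (AssertionError on older torch releases, RuntimeError on newer ones).
        DistributedDataParallel(GradNoPadding())
    except (AssertionError, RuntimeError) as e:
        print('DDP refused the wrap:', e)
    dist.destroy_process_group()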