From 4328c2f713bfeb84ae6adc0d838f2ec71361fe73 Mon Sep 17 00:00:00 2001
From: James Betker
Date: Sat, 19 Dec 2020 08:28:03 -0700
Subject: [PATCH] Change default ReLU slope to .2 BREAKS COMPATIBILITY

This conforms my ConvGnLelu implementation to the generally accepted
negative_slope=.2; I have no idea where I got .1. This will break backwards
compatibility with some older models, but will likely improve their
performance when freshly trained. I did some auditing to find what those
models might be, and I am not actively using any of them, so this is
probably OK.
---
 codes/models/arch_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/codes/models/arch_util.py b/codes/models/arch_util.py
index 7748be6c..73a3b63d 100644
--- a/codes/models/arch_util.py
+++ b/codes/models/arch_util.py
@@ -366,7 +366,7 @@ class ConvGnLelu(nn.Module):
         else:
             self.gn = None
         if activation:
-            self.lelu = nn.LeakyReLU(negative_slope=.1)
+            self.lelu = nn.LeakyReLU(negative_slope=.2)
         else:
             self.lelu = None
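
As a quick sanity check of what the change does, here is a minimal sketch
(assuming PyTorch is installed; the input values are arbitrary illustrations,
not taken from the patch):

import torch
import torch.nn as nn

x = torch.tensor([-1.0, 0.0, 1.0])

# Old default: negative inputs scaled by 0.1
old = nn.LeakyReLU(negative_slope=0.1)(x)  # tensor([-0.1000, 0.0000, 1.0000])

# New default: negative inputs scaled by 0.2
new = nn.LeakyReLU(negative_slope=0.2)(x)  # tensor([-0.2000, 0.0000, 1.0000])

Positive activations pass through unchanged either way; only the negative
region of the activation differs, which is why existing trained weights are
incompatible but fresh training is unaffected.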