From 1c065c41b4cff89cf6e0944ac403a3a25bb3194b Mon Sep 17 00:00:00 2001
From: James Betker
Date: Wed, 11 Nov 2020 17:24:27 -0700
Subject: [PATCH] Revert "..."

This reverts commit 4b92191880a3dddcef1da720b182b06f0cac2ace.
---
 codes/models/archs/discriminator_vgg_arch.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/codes/models/archs/discriminator_vgg_arch.py b/codes/models/archs/discriminator_vgg_arch.py
index 3570523a..731f2ffc 100644
--- a/codes/models/archs/discriminator_vgg_arch.py
+++ b/codes/models/archs/discriminator_vgg_arch.py
@@ -2,7 +2,7 @@ import torch
 import torch.nn as nn
 from models.archs.RRDBNet_arch import RRDB, RRDBWithBypass
-from models.archs.arch_util import ConvBnLelu, ConvGnLelu, ExpansionBlock, ConvGnSilu
+from models.archs.arch_util import ConvBnLelu, ConvGnLelu, ExpansionBlock, ConvGnSilu, ResidualBlockGN
 import torch.nn.functional as F
 from models.archs.SwitchedResidualGenerator_arch import gather_2d
 from models.archs.pyramid_arch import Pyramid

@@ -666,10 +666,15 @@ class PyramidDiscriminator(nn.Module):
     def __init__(self, in_nc, nf, block=ConvGnLelu):
         super(PyramidDiscriminator, self).__init__()
         self.initial_conv = block(in_nc, nf, kernel_size=3, stride=2, bias=True, norm=False, activation=True)
-        self.top_proc = nn.Sequential(*[ConvGnLelu(nf, nf, kernel_size=3, stride=2, bias=False, norm=True, activation=True)])
+        self.top_proc = nn.Sequential(*[ResidualBlockGN(nf),
+                                        ResidualBlockGN(nf),
+                                        ResidualBlockGN(nf)])
         self.pyramid = Pyramid(nf, depth=3, processing_convs_per_layer=2, processing_at_point=2,
                                scale_per_level=1.5, norm=True, return_outlevels=False)
-        self.bottom_proc = nn.Sequential(*[
+        self.bottom_proc = nn.Sequential(*[ResidualBlockGN(nf),
+                                           ResidualBlockGN(nf),
+                                           ResidualBlockGN(nf),
+                                           ResidualBlockGN(nf),
                                            ConvGnLelu(nf, nf // 2, kernel_size=1, activation=True, norm=True, bias=True),
                                            ConvGnLelu(nf // 2, nf // 4, kernel_size=1, activation=True, norm=True, bias=True),
                                            ConvGnLelu(nf // 4, 1, activation=False, norm=False, bias=True)])
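Note on the building block this revert reinstates: `ResidualBlockGN` lives in models/archs/arch_util.py, which is not shown in this patch. The sketch below is a hypothetical reconstruction, assuming a conventional two-convolution residual block with GroupNorm and LeakyReLU; it matches only the `ResidualBlockGN(nf)` call signature used in the hunk above, and the real definition in arch_util.py may differ.

# Hypothetical sketch of ResidualBlockGN (not the actual arch_util.py code).
# Assumes a standard pre-activation-free residual block: conv -> GN -> LeakyReLU
# -> conv -> GN, with the input added back before a final activation.
import torch
import torch.nn as nn
import torch.nn.functional as F

class ResidualBlockGN(nn.Module):
    def __init__(self, nf, groups=8):  # groups=8 is an assumed default
        super().__init__()
        self.conv1 = nn.Conv2d(nf, nf, kernel_size=3, padding=1, bias=False)
        self.gn1 = nn.GroupNorm(groups, nf)
        self.conv2 = nn.Conv2d(nf, nf, kernel_size=3, padding=1, bias=False)
        self.gn2 = nn.GroupNorm(groups, nf)

    def forward(self, x):
        identity = x
        out = F.leaky_relu(self.gn1(self.conv1(x)), negative_slope=0.2)
        out = self.gn2(self.conv2(out))
        # The skip connection preserves both channel count and spatial size,
        # so these blocks can be stacked freely at a fixed width nf.
        return F.leaky_relu(out + identity, negative_slope=0.2)

Because such a block changes neither resolution nor channel count, top_proc and bottom_proc can stack several of them between the strided initial_conv and the Pyramid stage without any shape bookkeeping, unlike the single stride-2 ConvGnLelu that the reverted commit had put in top_proc.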