forked from mrq/DL-Art-School
Turn BN off in SRG1
This won't work well, but just testing if GAN performance comes back
This commit is contained in:
parent 0ee39d419b
commit 726e946e79
@@ -58,7 +58,7 @@ class HalvingProcessingBlock(nn.Module):
     def __init__(self, filters):
         super(HalvingProcessingBlock, self).__init__()
         self.bnconv1 = ConvBnLelu(filters, filters * 2, stride=2, bn=False)
-        self.bnconv2 = ConvBnLelu(filters * 2, filters * 2, bn=True)
+        self.bnconv2 = ConvBnLelu(filters * 2, filters * 2, bn=False)
 
     def forward(self, x):
         x = self.bnconv1(x)
@@ -71,7 +71,7 @@ def create_sequential_growing_processing_block(filters_init, filter_growth, num_
     convs = []
     current_filters = filters_init
     for i in range(num_convs):
-        convs.append(ConvBnLelu(current_filters, current_filters + filter_growth, bn=True))
+        convs.append(ConvBnLelu(current_filters, current_filters + filter_growth, bn=False))
         current_filters += filter_growth
     return nn.Sequential(*convs), current_filters
 
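ConvBnLelu itself is defined elsewhere in this repository and is not shown in this diff. A minimal sketch of what the bn flag toggles, assuming a standard conv -> optional BatchNorm -> LeakyReLU layout (kernel size, padding, and activation slope below are illustrative assumptions, not taken from the actual implementation):

import torch.nn as nn

class ConvBnLelu(nn.Module):
    # Hypothetical sketch only: a conv layer, optionally followed by BatchNorm, then LeakyReLU.
    # The real ConvBnLelu in this repo may differ in kernel size, padding, bias, and slope.
    def __init__(self, filters_in, filters_out, stride=1, bn=True):
        super(ConvBnLelu, self).__init__()
        self.conv = nn.Conv2d(filters_in, filters_out, kernel_size=3, stride=stride, padding=1)
        # bn=False (what this commit switches to) drops the BatchNorm layer entirely.
        self.bn = nn.BatchNorm2d(filters_out) if bn else None
        self.lelu = nn.LeakyReLU(negative_slope=0.1)

    def forward(self, x):
        x = self.conv(x)
        if self.bn is not None:
            x = self.bn(x)
        return self.lelu(x)

With bn=False everywhere, the SRG1 blocks in this commit reduce to plain conv + LeakyReLU stacks, which is the point of the experiment described in the commit message.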