diff --git a/codes/models/ExtensibleTrainer.py b/codes/models/ExtensibleTrainer.py
index 9d4339f8..86d7e1fd 100644
--- a/codes/models/ExtensibleTrainer.py
+++ b/codes/models/ExtensibleTrainer.py
@@ -300,7 +300,8 @@ class ExtensibleTrainer(BaseModel):
             for name, net in netdict.items():
                 load_path = self.opt['path']['pretrain_model_%s' % (name,)]
                 if load_path is not None:
-                    logger.info('Loading model for [%s]' % (load_path))
+                    if self.rank <= 0:
+                        logger.info('Loading model for [%s]' % (load_path,))
                     self.load_network(load_path, net, self.opt['path']['strict_load'])
 
     def save(self, iter_step):
diff --git a/codes/train.py b/codes/train.py
index af709a3f..0c732664 100644
--- a/codes/train.py
+++ b/codes/train.py
@@ -294,7 +294,7 @@ def main():
             # log
             logger.info('# Validation # PSNR: {:.4e} Fea: {:.4e}'.format(avg_psnr, avg_fea_loss))
             # tensorboard logger
-            if opt['use_tb_logger'] and 'debug' not in opt['name']:
+            if opt['use_tb_logger'] and 'debug' not in opt['name'] and rank <= 0:
                 #tb_logger.add_scalar('val_psnr', avg_psnr, current_step)
                 tb_logger.add_scalar('val_fea', avg_fea_loss, current_step)