Cap test workers at 10 — NOTE(review): the diff uses `max(n_workers * len(gpu_ids), 10)`, which sets a *floor* of 10 workers, not a cap; an actual cap would be `min(..., 10)`. Verify whether the title or the code reflects the real intent.

This commit is contained in:
James Betker 2020-05-13 09:20:45 -06:00
parent 037a5a3cdb
commit 585b05e66b

View File

@ -14,7 +14,7 @@ def create_dataloader(dataset, dataset_opt, opt=None, sampler=None):
batch_size = dataset_opt['batch_size'] // world_size
shuffle = False
else:
num_workers = dataset_opt['n_workers'] * len(opt['gpu_ids'])
num_workers = max(dataset_opt['n_workers'] * len(opt['gpu_ids']), 10)
batch_size = dataset_opt['batch_size']
shuffle = True
return torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=shuffle,