"""create dataset and dataloader"""
import logging
import torch
import torch.utils.data


def create_dataloader(dataset, dataset_opt, opt=None, sampler=None):
    phase = dataset_opt['phase']
    if phase == 'train':
        if opt is not None and opt['dist']:
            # Distributed training: each process gets an equal share of the
            # global batch; shuffling is delegated to the sampler (expected to
            # be a DistributedSampler supplied by the caller).
            world_size = torch.distributed.get_world_size()
            num_workers = dataset_opt['n_workers']
            assert dataset_opt['batch_size'] % world_size == 0, \
                'batch_size must be divisible by the distributed world size'
            batch_size = dataset_opt['batch_size'] // world_size
            shuffle = False
        else:
            # Single-process training (optionally DataParallel over gpu_ids):
            # scale the number of workers with the number of GPUs.
            gpu_ids = opt['gpu_ids'] if opt is not None else [0]
            num_workers = dataset_opt['n_workers'] * len(gpu_ids)
            batch_size = dataset_opt['batch_size']
            shuffle = True
        return torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=shuffle,
                                           num_workers=num_workers, sampler=sampler,
                                           drop_last=True, pin_memory=True)
    else:
        # Validation / test: keep sample order and fall back to a batch size of 1 if unset.
        batch_size = dataset_opt['batch_size'] or 1
        return torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False,
                                           num_workers=max(batch_size // 2, 1),
                                           pin_memory=True)
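

# A minimal usage sketch for the distributed branch above (not part of the
# original API): it assumes opt['dist'] is true and that the training script
# has already called torch.distributed.init_process_group(). The helper name
# below is hypothetical; it only illustrates that the caller is expected to
# construct the DistributedSampler and pass it in via `sampler`.
def _example_distributed_train_loader(dataset, dataset_opt, opt):
    sampler = torch.utils.data.DistributedSampler(
        dataset,
        num_replicas=torch.distributed.get_world_size(),
        rank=torch.distributed.get_rank())
    return create_dataloader(dataset, dataset_opt, opt=opt, sampler=sampler)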


def create_dataset(dataset_opt):
    mode = dataset_opt['mode']
    # Dataset classes are imported lazily so only the selected mode's module
    # (and its dependencies) needs to be importable.
    # datasets for image restoration
    if mode == 'LQ':
        from data.LQ_dataset import LQDataset as D
    elif mode == 'LQGT':
        from data.LQGT_dataset import LQGTDataset as D
    # datasets for image corruption
    elif mode == 'downsample':
        from data.Downsample_dataset import DownsampleDataset as D
    elif mode == 'fullimage':
        from data.full_image_dataset import FullImageDataset as D
    elif mode == 'single_image_extensible':
        from data.single_image_dataset import SingleImageDataset as D
    elif mode == 'multi_frame_extensible':
        from data.multi_frame_dataset import MultiFrameDataset as D
    elif mode == 'combined':
        from data.combined_dataset import CombinedDataset as D
    else:
        raise NotImplementedError('Dataset [{:s}] is not recognized.'.format(mode))
    dataset = D(dataset_opt)

    logger = logging.getLogger('base')
    logger.info('Dataset [{:s} - {:s}] is created.'.format(dataset.__class__.__name__,
                                                           dataset_opt['name']))
    return dataset
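

if __name__ == '__main__':
    # A minimal wiring sketch, not part of the original module: it assumes the
    # repository's `data` package is importable and that the chosen dataset
    # class accepts these keys. The option values below are hypothetical
    # placeholders; real configurations come from the project's option files.
    example_dataset_opt = {
        'name': 'example_train_set',   # only used for logging
        'mode': 'LQGT',                # selects LQGTDataset above
        'phase': 'train',
        'batch_size': 16,
        'n_workers': 4,
        # ...dataset-specific keys (e.g. data paths) would be required here...
    }
    example_opt = {'dist': False, 'gpu_ids': [0]}

    train_set = create_dataset(example_dataset_opt)
    train_loader = create_dataloader(train_set, example_dataset_opt, opt=example_opt)
    print('batches per epoch:', len(train_loader))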