File size: 1,731 Bytes
36fdbcf |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 |
from src.models.build_sam3D import sam_model_registry3D
from src.dataset.dataloader import Dataset_promise, Dataloader_promise
import torchio as tio
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data.distributed import DistributedSampler
import torch
def get_dataloader(args, split='', use_small=False):
    """Build a Dataloader_promise for the requested dataset split.

    Args:
        args: Namespace carrying data, data_dir, image_size, batch_size,
            num_workers, and the ddp flag.
        split: Dataset split name; 'train' enables augmentation, shuffling,
            and the configured batch size.
        use_small: Accepted for interface compatibility; not used here.

    Returns:
        A Dataloader_promise over the constructed Dataset_promise.
    """
    is_train = split == 'train'

    # Canonical orientation + 1mm isotropic resampling for every split;
    # random axis flips augment the training data only.
    tfm_steps = [tio.ToCanonical(), tio.Resample(1)]
    if is_train:
        tfm_steps.append(tio.RandomFlip(axes=(0, 1, 2)))

    dataset = Dataset_promise(
        data=args.data,
        data_dir=args.data_dir,
        split=split,
        transform=tio.Compose(tfm_steps),
        image_size=args.image_size,
        args=args,
    )

    if is_train:
        sampler = DistributedSampler(dataset) if args.ddp else None
        # DistributedSampler shuffles internally, so the loader must not.
        shuffle = sampler is None
    else:
        sampler = None
        shuffle = False

    # NOTE(review): pinning is disabled only for non-train 'lits' data —
    # presumably a host-memory workaround; confirm with the original author.
    pin = not (not is_train and args.data == 'lits')

    return Dataloader_promise(
        dataset=dataset,
        sampler=sampler,
        # Evaluation always runs one volume at a time.
        batch_size=args.batch_size if is_train else 1,
        shuffle=shuffle,
        num_workers=args.num_workers,
        pin_memory=pin,
    )
def build_model(args, checkpoint=None):
    """Construct the 3D SAM model and optionally wrap it for DDP.

    Args:
        args: Namespace carrying model_type, device, the ddp flag, and rank.
        checkpoint: Optional checkpoint path/object forwarded to the
            registry constructor.

    Returns:
        The model on args.device; wrapped in DistributedDataParallel
        when args.ddp is set.
    """
    model_ctor = sam_model_registry3D[args.model_type]
    model = model_ctor(checkpoint=checkpoint, args=args).to(args.device)
    if args.ddp:
        # Convert BatchNorm layers to SyncBatchNorm so normalization
        # statistics are synchronized across processes, then wrap in DDP.
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model = DDP(model, device_ids=[args.rank], output_device=args.rank)
    return model
|