1
0
mirror of https://github.com/malarinv/tacotron2 synced 2026-03-07 17:32:33 +00:00

train.py: shuffling at every epoch

This commit is contained in:
rafaelvalle
2019-03-15 17:49:27 -07:00
parent bff304f432
commit f37998c59d

View File

@@ -59,7 +59,7 @@ def prepare_dataloaders(hparams):
 train_sampler = DistributedSampler(trainset) \
     if hparams.distributed_run else None
-train_loader = DataLoader(trainset, num_workers=1, shuffle=False,
+train_loader = DataLoader(trainset, num_workers=1, shuffle=True,
     sampler=train_sampler,
     batch_size=hparams.batch_size, pin_memory=False,
     drop_last=True, collate_fn=collate_fn)