mirror of
https://github.com/malarinv/tacotron2
synced 2026-03-08 01:32:35 +00:00
train.py: shuffling at every epoch
This commit is contained in:
2
train.py
2
train.py
@@ -59,7 +59,7 @@ def prepare_dataloaders(hparams):
     train_sampler = DistributedSampler(trainset) \
         if hparams.distributed_run else None

-    train_loader = DataLoader(trainset, num_workers=1, shuffle=False,
+    train_loader = DataLoader(trainset, num_workers=1, shuffle=True,
                               sampler=train_sampler,
                               batch_size=hparams.batch_size, pin_memory=False,
                               drop_last=True, collate_fn=collate_fn)
Reference in New Issue
Block a user