@@ -162,7 +162,6 @@ def train(output_directory, log_directory, checkpoint_path, warm_start, n_gpus,
     if hparams.distributed_run:
         init_distributed(hparams, n_gpus, rank, group_name)
     torch.manual_seed(hparams.seed)
     torch.cuda.manual_seed(hparams.seed)
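For reference, a minimal standalone sketch of what the seeding lines in this hunk do. `HParams` below is a hypothetical stand-in for the repo's hparams object and is not part of the diff:

```python
import torch


class HParams:
    """Hypothetical stand-in for the hparams object passed to train()."""
    seed = 1234


hparams = HParams()

# Seed the CPU random number generator (in recent PyTorch releases this
# call seeds the RNGs on all devices, CPU and CUDA alike).
torch.manual_seed(hparams.seed)

# Seed the RNG of the current CUDA device; the call is silently ignored
# when CUDA is unavailable. For multi-GPU distributed runs,
# torch.cuda.manual_seed_all(seed) seeds every device instead.
torch.cuda.manual_seed(hparams.seed)
```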