1
0
mirror of https://github.com/malarinv/tacotron2 synced 2026-03-08 01:32:35 +00:00

Merge pull request #136 from GrzegorzKarchNV/master

Fixing concatenation error for fp16 distributed training
This commit is contained in:
Rafael Valle
2019-02-01 12:10:42 -08:00
committed by GitHub

View File

@@ -140,7 +140,7 @@ def apply_gradient_allreduce(module):
buckets = {}
for param in module.parameters():
if param.requires_grad and param.grad is not None:
-            tp = type(param.data)
+            tp = param.data.dtype
if tp not in buckets:
buckets[tp] = []
buckets[tp].append(param)