model.py: moving for better readability

attention_full_mel
Rafael Valle 2018-05-20 12:22:06 -07:00
parent 977cb37cea
commit d5b64729d1
1 changed file with 1 addition and 1 deletion


@@ -351,7 +351,6 @@ class Decoder(nn.Module):
         attention_weights:
         """
-        prenet_output = self.prenet(decoder_input)
         cell_input = torch.cat((self.decoder_hidden, self.attention_context), -1)
         self.attention_hidden, self.attention_cell = self.attention_rnn(
             cell_input, (self.attention_hidden, self.attention_cell))
@@ -364,6 +363,7 @@ class Decoder(nn.Module):
             attention_weights_cat, self.mask)
         self.attention_weights_cum += self.attention_weights
+        prenet_output = self.prenet(decoder_input)
         decoder_input = torch.cat((prenet_output, self.attention_context), -1)
         self.decoder_hidden, self.decoder_cell = self.decoder_rnn(
             decoder_input, (self.decoder_hidden, self.decoder_cell))
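
In effect, the commit moves the self.prenet(decoder_input) call from the top of Decoder.decode down to just before its output is consumed, without changing behavior. Below is a rough sketch of how the decode step reads after this change; it is reconstructed from the two hunks above, and the attention_weights_cat / self.attention_layer block that falls between the hunks is an assumption about the surrounding code, not something shown in this diff.

    import torch

    # Fragment of the Decoder class (sketch only).
    def decode(self, decoder_input):
        # Attention RNN step on the previous decoder hidden state and attention context.
        cell_input = torch.cat((self.decoder_hidden, self.attention_context), -1)
        self.attention_hidden, self.attention_cell = self.attention_rnn(
            cell_input, (self.attention_hidden, self.attention_cell))

        # Assumed (elided between the two hunks): recompute the attention context.
        attention_weights_cat = torch.cat(
            (self.attention_weights.unsqueeze(1),
             self.attention_weights_cum.unsqueeze(1)), dim=1)
        self.attention_context, self.attention_weights = self.attention_layer(
            self.attention_hidden, self.memory, self.processed_memory,
            attention_weights_cat, self.mask)
        self.attention_weights_cum += self.attention_weights

        # Moved here by this commit: prenet_output is only used to build the
        # decoder RNN input, so computing it here keeps it next to its use site.
        prenet_output = self.prenet(decoder_input)
        decoder_input = torch.cat((prenet_output, self.attention_context), -1)
        self.decoder_hidden, self.decoder_cell = self.decoder_rnn(
            decoder_input, (self.decoder_hidden, self.decoder_cell))

Since prenet_output does not feed the attention RNN or the attention layer, relocating the call is purely a readability change, as the commit message says.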