Fix embedding layer implementation

The embedding `dropout` parameter was not used; the dropout rate was hard-coded to 0.2.
This commit is contained in:
Victoria X Lin 2018-09-11 23:06:58 -07:00 committed by Bryan McCann
parent 4523bcabe8
commit c0e4f7083a
1 changed file with 1 addition and 1 deletion

View File

@ -333,7 +333,7 @@ class Embedding(nn.Module):
if self.project:
self.projection = Feedforward(dimension, trained_dimension)
dimension = trained_dimension
self.dropout = nn.Dropout(0.2)
self.dropout = nn.Dropout(dropout)
self.dimension = dimension
def forward(self, x, lengths=None, device=-1):