Fix embedding layer implementation
The dropout parameter passed to Embedding was ignored; the layer hard-coded nn.Dropout(0.2) instead of using it.
parent 4523bcabe8
commit c0e4f7083a
@@ -333,7 +333,7 @@ class Embedding(nn.Module):
         if self.project:
             self.projection = Feedforward(dimension, trained_dimension)
             dimension = trained_dimension
-        self.dropout = nn.Dropout(0.2)
+        self.dropout = nn.Dropout(dropout)
         self.dimension = dimension
 
     def forward(self, x, lengths=None, device=-1):
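For context, a minimal sketch of how the constructor around this hunk might look once the dropout argument is threaded through. Only the names visible in the diff (project, projection, Feedforward, trained_dimension, dropout, dimension) come from the actual source; the constructor signature, default values, and the Feedforward stand-in below are assumptions for illustration.

```python
import torch.nn as nn


class Feedforward(nn.Module):
    # Stand-in for the project's Feedforward module (assumption): a plain linear layer.
    def __init__(self, d_in, d_out):
        super().__init__()
        self.linear = nn.Linear(d_in, d_out)

    def forward(self, x):
        return self.linear(x)


class Embedding(nn.Module):
    # Hypothetical constructor; the real class may take additional arguments.
    def __init__(self, dimension, trained_dimension, project=False, dropout=0.2):
        super().__init__()
        self.project = project
        if self.project:
            self.projection = Feedforward(dimension, trained_dimension)
            dimension = trained_dimension
        # The fix in this commit: pass the dropout argument through
        # instead of the hard-coded nn.Dropout(0.2).
        self.dropout = nn.Dropout(dropout)
        self.dimension = dimension
```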