Use LayerNorm and SELU in Tok2Vec

This commit is contained in:
Matthew Honnibal 2017-08-06 18:33:18 +02:00
parent 78498a072d
commit 3ed203de25
1 changed file with 6 additions and 5 deletions

View File

@@ -10,6 +10,7 @@ import cytoolz
from thinc.neural._classes.convolution import ExtractWindow
from thinc.neural._classes.static_vectors import StaticVectors
from thinc.neural._classes.batchnorm import BatchNorm
from thinc.neural._classes.layernorm import LayerNorm as LN
from thinc.neural._classes.resnet import Residual
from thinc.neural import ReLu
from thinc.neural._classes.selu import SELU
@@ -220,11 +221,11 @@ def Tok2Vec(width, embed_size, preprocess=None):
with_flatten(
asarray(Model.ops, dtype='uint64')
>> uniqued(embed, column=5)
>> Maxout(width, width*4, pieces=3)
>> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3))
>> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3))
>> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3))
>> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3)),
>> LN(Maxout(width, width*4, pieces=3))
>> Residual(ExtractWindow(nW=1) >> SELU(width, width*3))
>> Residual(ExtractWindow(nW=1) >> SELU(width, width*3))
>> Residual(ExtractWindow(nW=1) >> SELU(width, width*3))
>> Residual(ExtractWindow(nW=1) >> SELU(width, width*3)),
pad=4)
)
if preprocess not in (False, None):