Unset LayerNorm backwards compat hack

This commit is contained in:
Matthew Honnibal 2017-10-03 20:47:10 -05:00
parent 73ac0aa0b5
commit af75b74208
1 changed files with 2 additions and 2 deletions

View File

@@ -31,8 +31,8 @@ import numpy
 import io
 # TODO: Unset this once we don't want to support models previous models.
-import thinc.neural._classes.layernorm
-thinc.neural._classes.layernorm.set_compat_six_eight(True)
+#import thinc.neural._classes.layernorm
+#thinc.neural._classes.layernorm.set_compat_six_eight(True)
 VECTORS_KEY = 'spacy_pretrained_vectors'