Update test_tokenizer.py

Henning Peters 2016-02-10 19:24:37 +01:00
parent 82c57f21a4
commit 9d8966a2c0
1 changed file with 7 additions and 7 deletions


@@ -8,13 +8,13 @@ import cloudpickle
 import tempfile

-@pytest.mark.xfail
-def test_pickle(en_tokenizer):
-    file_ = io.BytesIO()
-    cloudpickle.dump(en_tokenizer, file_)
-    file_.seek(0)
-    loaded = pickle.load(file_)
-    assert loaded is not None
+# @pytest.mark.xfail
+# def test_pickle(en_tokenizer):
+#     file_ = io.BytesIO()
+#     cloudpickle.dump(en_tokenizer, file_)
+#     file_.seek(0)
+#     loaded = pickle.load(file_)
+#     assert loaded is not None

 def test_no_word(en_tokenizer):