# mirror of https://github.com/explosion/spaCy.git
from __future__ import unicode_literals
from ...en import English
import pytest
@pytest.fixture
def en_tokenizer():
    """Provide a tokenizer built from the English language defaults."""
    tokenizer = English.Defaults.create_tokenizer()
    return tokenizer
def test_big_ellipsis(en_tokenizer):
|
||
|
tokens = en_tokenizer(u'$45...............Asking')
|
||
|
assert len(tokens) > 2
|