mirror of https://github.com/explosion/spaCy.git
6 lines · 173 B · Python
def test_issue6755(en_tokenizer):
    # Regression test for issue #6755: a zero-length span (doc[:0])
    # should have empty text and empty text_with_ws.
    doc = en_tokenizer("This is a magnificent sentence.")
    span = doc[:0]
    assert span.text_with_ws == ""
    assert span.text == ""
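
For context, the same behavior can be checked outside the pytest fixture. The sketch below assumes only that spaCy is installed and uses spacy.blank("en") in place of the en_tokenizer fixture; it is an illustration, not part of the test suite.

import spacy

# spacy.blank("en") builds an English pipeline containing only the tokenizer,
# which is what the en_tokenizer fixture supplies to the test above.
nlp = spacy.blank("en")
doc = nlp("This is a magnificent sentence.")

span = doc[:0]  # zero-length span at the start of the doc
assert span.text_with_ws == ""
assert span.text == ""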