mirror of https://github.com/explosion/spaCy.git
from __future__ import unicode_literals

import pytest

from ...displacy import render
from ..util import get_doc


def test_issue2361(de_tokenizer):
    # displaCy must HTML-escape token text before rendering (issue #2361).
    tokens = de_tokenizer('< > & " ')
    html = render(get_doc(tokens.vocab, [t.text for t in tokens]))

    assert '&lt;' in html
    assert '&gt;' in html
    assert '&amp;' in html
    assert '&quot;' in html
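
For reference, a minimal standalone sketch (not part of this test file) of the behaviour the regression test guards: displacy.render() HTML-escapes token text before embedding it in the generated markup. The sketch uses displaCy's manual "dep" input format so that no German tokenizer or parsed Doc is needed, and it assumes a spaCy installation that already contains the fix for issue #2361.

from spacy import displacy

# Manual "dep" input: a list of words plus (here empty) arcs, so no model
# or dependency parse is required.
parsed = {
    "words": [{"text": "<", "tag": "X"}, {"text": "&", "tag": "X"}],
    "arcs": [],
}
html = displacy.render(parsed, style="dep", manual=True)
# The sensitive characters should appear as HTML entities in the output.
assert "&lt;" in html and "&amp;" in html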