2017-01-10 14:53:15 +00:00
|
|
|
# coding: utf-8
|
2017-01-03 17:17:57 +00:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
|
2017-01-10 14:53:15 +00:00
|
|
|
|
2017-01-12 22:40:02 +00:00
|
|
|
# A small set of URL-like strings (full http URL, bare domain, mailto:) used
# directly by the fast tokenizer round-trip tests and extended by URLS_FULL.
URLS_BASIC = [
    # Long real-world URL with a many-parameter query string
    "http://www.nytimes.com/2016/04/20/us/politics/new-york-primary-preview.html?hp&action=click&pgtype=Homepage&clickSource=story-heading&module=a-lede-package-region®ion=top-news&WT.nav=top-news&_r=0",
    # Bare domain without a scheme
    "www.red-stars.com",
    # mailto: address
    "mailto:foo.bar@baz.com",
]
|
|
|
|
|
|
|
|
# URLS_BASIC plus harder cases (hyphenated mailto addresses, query strings,
# parenthesized path with fragment); used by the slow prefix/suffix tests.
URLS_FULL = URLS_BASIC + [
    "mailto:foo-bar@baz-co.com",
    "mailto:foo-bar@baz-co.com?subject=hi",
    "www.google.com?q=google",
    "http://foo.com/blah_(wikipedia)#cite-1",
]
|
|
|
|
|
2017-03-04 22:13:11 +00:00
|
|
|
# URL SHOULD_MATCH and SHOULD_NOT_MATCH patterns courtesy of https://mathiasbynens.be/demo/url-regex
URLS_SHOULD_MATCH = [
    # Plain http(s) URLs with paths and query strings
    "http://foo.com/blah_blah",
    "http://BlahBlah.com/Blah_Blah",
    "http://foo.com/blah_blah/",
    "http://www.example.com/wpstyle/?p=364",
    "https://www.example.com/foo/?bar=baz&inga=42&quux",
    # Userinfo (user and user:password) with and without ports / trailing slash
    "http://userid:password@example.com:8080",
    "http://userid:password@example.com:8080/",
    "http://userid@example.com",
    "http://userid@example.com/",
    "http://userid@example.com:8080",
    "http://userid@example.com:8080/",
    "http://userid:password@example.com",
    "http://userid:password@example.com/",
    # Raw IPv4 hosts
    "http://142.42.1.1/",
    "http://142.42.1.1:8080/",
    # Parentheses, fragments and unicode inside the path
    "http://foo.com/blah_(wikipedia)#cite-1",
    "http://foo.com/blah_(wikipedia)_blah#cite-1",
    "http://foo.com/unicode_(✪)_in_parens",
    "http://foo.com/(something)?after=parens",
    "http://code.google.com/events/#&product=browser",
    # Short domain and non-http schemes
    "http://j.mp",
    "ftp://foo.bar/baz",
    # Percent-encoding and unusual-but-legal userinfo characters
    "http://foo.bar/?q=Test%20URL-encoded%20stuff",
    "http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
    "http://1337.net",
    "http://a.b-c.de",
    "http://223.255.255.254",
    "http://a.b--c.de/",  # this is a legit domain name see: https://gist.github.com/dperini/729294 comment on 9/9/2014
    "ssh://login@server.com:12345/repository.git",
    "svn+ssh://user@ssh.yourdomain.com/path",
    # Browser-internal schemes — marked xfail (not matched at present)
    pytest.param(
        "chrome://extensions/?id=mhjfbmdgcfjbbpaeojofohoefgiehjai",
        marks=pytest.mark.xfail(),
    ),
    pytest.param(
        "chrome-extension://mhjfbmdgcfjbbpaeojofohoefgiehjai", marks=pytest.mark.xfail()
    ),
    "http://foo.com/blah_blah_(wikipedia)",
    "http://foo.com/blah_blah_(wikipedia)_(again)",
    "http://www.foo.co.uk",
    "http://www.foo.co.uk/",
    "http://www.foo.co.uk/blah/blah",
    # Internationalized / non-ASCII domains and paths
    "http://⌘.ws",
    "http://⌘.ws/",
    "http://☺.damowmow.com/",
    "http://✪df.ws/123",
    "http://➡.ws/䨹",
    "http://مثال.إختبار",
    "http://例子.测试",
    "http://उदाहरण.परीक्षा",
]
|
|
|
|
|
|
|
|
URLS_SHOULD_NOT_MATCH = [
    # Scheme with empty or degenerate host / query / fragment
    "http://",
    "http://.",
    "http://..",
    "http://../",
    "http://?",
    "http://??",
    "http://??/",
    "http://#",
    "http://##",
    "http://##/",
    # Unencoded spaces in the query string
    "http://foo.bar?q=Spaces should be encoded",
    # Scheme-relative / malformed prefixes
    "//",
    "//a",
    "///a",
    "///",
    "http:///a",
    "rdar://1234",
    "h://test",
    "http:// shouldfail.com",
    ":// should fail",
    "http://foo.bar/foo(bar)baz quux",
    # Invalid hyphen placement in host labels
    "http://-error-.invalid/",
    "http://a.b-.co",
    # Reserved / non-routable / malformed IPv4-like hosts
    "http://0.0.0.0",
    "http://10.1.1.0",
    "http://10.1.1.255",
    "http://224.1.1.1",
    "http://123.123.123",
    "http://3628126748",
    # Leading / trailing dots in the host
    "http://.www.foo.bar/",
    "http://.www.foo.bar./",
    "http://10.1.1.1",
    "NASDAQ:GOOG",
    "http://-a.b.co",
    # Bare domain — marked xfail (currently does match)
    pytest.param("foo.com", marks=pytest.mark.xfail()),
    "http://1.1.1.1.1",
    "http://www.foo.bar./",
]
|
|
|
|
|
2017-01-10 14:56:35 +00:00
|
|
|
|
2017-01-03 17:17:57 +00:00
|
|
|
# Punctuation we want to check is split away before the URL
# (opening bracket / quote characters that commonly precede a URL in text)
PREFIXES = ["(", '"', ">"]
|
2017-01-03 17:17:57 +00:00
|
|
|
|
2017-01-10 14:56:35 +00:00
|
|
|
|
2017-01-03 17:17:57 +00:00
|
|
|
# Punctuation we want to check is split away after the URL
# (quote / punctuation characters that commonly follow a URL in text)
SUFFIXES = ['"', ":", ">"]
|
|
|
|
|
2017-01-03 17:17:57 +00:00
|
|
|
|
2017-03-04 22:13:11 +00:00
|
|
|
@pytest.mark.parametrize("url", URLS_SHOULD_MATCH)
|
|
|
|
def test_should_match(en_tokenizer, url):
|
2019-10-01 19:36:04 +00:00
|
|
|
assert en_tokenizer.token_match(url) is not None
|
2017-03-04 22:13:11 +00:00
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
|
2017-03-04 22:13:11 +00:00
|
|
|
@pytest.mark.parametrize("url", URLS_SHOULD_NOT_MATCH)
|
|
|
|
def test_should_not_match(en_tokenizer, url):
|
2019-10-01 19:36:04 +00:00
|
|
|
assert en_tokenizer.token_match(url) is None
|
2017-01-10 14:56:35 +00:00
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
|
2017-01-12 22:40:02 +00:00
|
|
|
@pytest.mark.parametrize("url", URLS_BASIC)
|
2017-01-10 14:56:35 +00:00
|
|
|
def test_tokenizer_handles_simple_url(tokenizer, url):
|
|
|
|
tokens = tokenizer(url)
|
2017-01-03 17:17:57 +00:00
|
|
|
assert len(tokens) == 1
|
2017-01-10 14:56:35 +00:00
|
|
|
assert tokens[0].text == url
|
2017-01-03 17:17:57 +00:00
|
|
|
|
|
|
|
|
2017-01-12 22:40:02 +00:00
|
|
|
@pytest.mark.parametrize("url", URLS_BASIC)
|
|
|
|
def test_tokenizer_handles_simple_surround_url(tokenizer, url):
|
|
|
|
tokens = tokenizer("(" + url + ")")
|
|
|
|
assert len(tokens) == 3
|
|
|
|
assert tokens[0].text == "("
|
|
|
|
assert tokens[1].text == url
|
|
|
|
assert tokens[2].text == ")"
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.slow
@pytest.mark.parametrize("prefix", PREFIXES)
@pytest.mark.parametrize("url", URLS_FULL)
def test_tokenizer_handles_prefixed_url(tokenizer, prefix, url):
    """A leading punctuation character should be split into its own token."""
    doc = tokenizer(prefix + url)
    assert len(doc) == 2
    assert (doc[0].text, doc[1].text) == (prefix, url)
|
2017-01-10 14:56:35 +00:00
|
|
|
|
|
|
|
|
2017-01-12 22:40:02 +00:00
|
|
|
@pytest.mark.slow
@pytest.mark.parametrize("suffix", SUFFIXES)
@pytest.mark.parametrize("url", URLS_FULL)
def test_tokenizer_handles_suffixed_url(tokenizer, url, suffix):
    """A trailing punctuation character should be split into its own token."""
    doc = tokenizer(url + suffix)
    assert len(doc) == 2
    assert (doc[0].text, doc[1].text) == (url, suffix)
|
2017-01-10 14:56:35 +00:00
|
|
|
|
|
|
|
|
2017-01-10 14:57:35 +00:00
|
|
|
@pytest.mark.slow
@pytest.mark.parametrize("prefix", PREFIXES)
@pytest.mark.parametrize("suffix", SUFFIXES)
@pytest.mark.parametrize("url", URLS_FULL)
def test_tokenizer_handles_surround_url(tokenizer, prefix, suffix, url):
    """Punctuation on both sides should leave the URL intact in the middle."""
    doc = tokenizer(prefix + url + suffix)
    assert len(doc) == 3
    assert (doc[0].text, doc[1].text, doc[2].text) == (prefix, url, suffix)
|
2017-01-10 14:56:35 +00:00
|
|
|
|
|
|
|
|
2017-01-10 14:57:35 +00:00
|
|
|
@pytest.mark.slow
@pytest.mark.parametrize("prefix1", PREFIXES)
@pytest.mark.parametrize("prefix2", PREFIXES)
@pytest.mark.parametrize("url", URLS_FULL)
def test_tokenizer_handles_two_prefix_url(tokenizer, prefix1, prefix2, url):
    """Two stacked leading punctuation marks should each become a token."""
    doc = tokenizer(prefix1 + prefix2 + url)
    assert len(doc) == 3
    assert (doc[0].text, doc[1].text, doc[2].text) == (prefix1, prefix2, url)
|
2017-01-10 14:56:35 +00:00
|
|
|
|
|
|
|
|
2017-01-10 14:57:35 +00:00
|
|
|
@pytest.mark.slow
@pytest.mark.parametrize("suffix1", SUFFIXES)
@pytest.mark.parametrize("suffix2", SUFFIXES)
@pytest.mark.parametrize("url", URLS_FULL)
def test_tokenizer_handles_two_suffix_url(tokenizer, suffix1, suffix2, url):
    """Two stacked trailing punctuation marks should each become a token."""
    doc = tokenizer(url + suffix1 + suffix2)
    assert len(doc) == 3
    assert (doc[0].text, doc[1].text, doc[2].text) == (url, suffix1, suffix2)
|