move code to netlib

Thomas Kriechbaumer 2015-08-09 22:15:58 +02:00
parent a10c31c698
commit 891fa50e55
6 changed files with 9 additions and 122 deletions

View File

@@ -9,20 +9,21 @@
 # `--ignore :80$` as an additional parameter.
 # (see http://mitmproxy.org/doc/features/passthrough.html)
-from libmproxy.protocol.http import HTTPRequest
+import netlib.http.semantics
 from libmproxy.protocol.tcp import TCPHandler
 from libmproxy.protocol import KILL
 from libmproxy.script import concurrent
 def start(context, argv):
-    HTTPRequest._headers_to_strip_off.remove("Connection")
-    HTTPRequest._headers_to_strip_off.remove("Upgrade")
+    netlib.http.semantics.Request._headers_to_strip_off.remove("Connection")
+    netlib.http.semantics.Request._headers_to_strip_off.remove("Upgrade")
 def done(context):
-    HTTPRequest._headers_to_strip_off.append("Connection")
-    HTTPRequest._headers_to_strip_off.append("Upgrade")
+    netlib.http.semantics.Request._headers_to_strip_off.append("Connection")
+    netlib.http.semantics.Request._headers_to_strip_off.append("Upgrade")
 @concurrent
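As a reading aid, a minimal sketch of what the two hooks in this example now do; it assumes netlib.http.semantics.Request owns the _headers_to_strip_off list after this commit, and that the list still contains "Connection" and "Upgrade" by default (as the libmproxy list removed further below did).

    # Hypothetical standalone sketch, not the example script itself.
    import netlib.http.semantics

    strip = netlib.http.semantics.Request._headers_to_strip_off  # assumed location after the move

    def start(context, argv):
        # Keep the WebSocket handshake headers intact while the script is loaded.
        strip.remove("Connection")
        strip.remove("Upgrade")

    def done(context):
        # Restore the default stripping behaviour on unload.
        strip.append("Connection")
        strip.append("Upgrade")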

View File

@@ -76,7 +76,7 @@ class ViewAuto:
     def __call__(self, hdrs, content, limit):
         ctype = hdrs.get_first("content-type")
         if ctype:
-            ct = utils.parse_content_type(ctype) if ctype else None
+            ct = netlib.utils.parse_content_type(ctype) if ctype else None
             ct = "%s/%s" % (ct[0], ct[1])
             if ct in content_types_map:
                 return content_types_map[ct][0](hdrs, content, limit)
@@ -241,7 +241,7 @@ class ViewMultipart:
     content_types = ["multipart/form-data"]
     def __call__(self, hdrs, content, limit):
-        v = utils.multipartdecode(hdrs, content)
+        v = netlib.utils.multipartdecode(hdrs, content)
         if v:
             r = [
                 urwid.Text(("highlight", "Form data:\n")),
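A small usage sketch of the call this hunk switches to: netlib.utils.parse_content_type returns a (type, subtype, parameters) tuple (per the docstring of the libmproxy copy deleted below), which ViewAuto joins into a "type/subtype" key for its content-view lookup. The content_types_map stand-in here is only illustrative.

    import netlib.utils

    content_types_map = {"text/html": "html-view"}  # placeholder for the real map

    ct = netlib.utils.parse_content_type("text/html; charset=UTF-8")
    if ct:
        key = "%s/%s" % (ct[0], ct[1])       # -> "text/html"
        view = content_types_map.get(key)    # -> "html-view"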

View File

@@ -219,14 +219,6 @@ class HTTPRequest(MessageMixin, semantics.Request):
         is_replay=bool
     )
-    # This list is adopted legacy code.
-    # We probably don't need to strip off keep-alive.
-    _headers_to_strip_off = ['Proxy-Connection',
-                             'Keep-Alive',
-                             'Connection',
-                             'Transfer-Encoding',
-                             'Upgrade']
     @classmethod
     def from_state(cls, state):
         f = cls(
@@ -360,11 +352,6 @@ class HTTPResponse(MessageMixin, semantics.Response):
         msg=str
     )
-    _headers_to_strip_off = ['Proxy-Connection',
-                             'Alternate-Protocol',
-                             'Alt-Svc']
     @classmethod
     def from_state(cls, state):
         f = cls(None, None, None, None, None)

View File

@@ -61,34 +61,6 @@ def pretty_json(s):
     return json.dumps(p, sort_keys=True, indent=4).split("\n")
-def multipartdecode(hdrs, content):
-    """
-        Takes a multipart boundary encoded string and returns list of (key, value) tuples.
-    """
-    v = hdrs.get_first("content-type")
-    if v:
-        v = parse_content_type(v)
-        if not v:
-            return []
-        boundary = v[2].get("boundary")
-        if not boundary:
-            return []
-        rx = re.compile(r'\bname="([^"]+)"')
-        r = []
-        for i in content.split("--" + boundary):
-            parts = i.splitlines()
-            if len(parts) > 1 and parts[0][0:2] != "--":
-                match = rx.search(parts[1])
-                if match:
-                    key = match.group(1)
-                    value = "".join(parts[3 + parts[2:].index(""):])
-                    r.append((key, value))
-        return r
-    return []
 def pretty_duration(secs):
     formatters = [
         (100, "{:.0f}s"),
@@ -154,34 +126,6 @@ class LRUCache:
             return ret
-def parse_content_type(c):
-    """
-        A simple parser for content-type values. Returns a (type, subtype,
-        parameters) tuple, where type and subtype are strings, and parameters
-        is a dict. If the string could not be parsed, return None.
-        E.g. the following string:
-            text/html; charset=UTF-8
-        Returns:
-            ("text", "html", {"charset": "UTF-8"})
-    """
-    parts = c.split(";", 1)
-    ts = parts[0].split("/", 1)
-    if len(ts) != 2:
-        return None
-    d = {}
-    if len(parts) == 2:
-        for i in parts[1].split(";"):
-            clause = i.split("=", 1)
-            if len(clause) == 2:
-                d[clause[0].strip()] = clause[1].strip()
-    return ts[0].lower(), ts[1].lower(), d
 def clean_hanging_newline(t):
     """
         Many editors will silently add a newline to the final line of a
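Since both helpers deleted here (multipartdecode and parse_content_type) move to netlib, a quick sketch of the multipart case as it would look against netlib.utils; the module path is an assumption based on the contentview hunk above, and the input and expected output mirror the test removed further below.

    import netlib.utils
    from netlib import odict   # assumed home of ODict, as used by the deleted test

    headers = odict.ODict(
        [("content-type", "multipart/form-data; boundary=somefancyboundary")])
    content = (
        "--somefancyboundary\n"
        'Content-Disposition: form-data; name="field1"\n\n'
        "value1\n"
        "--somefancyboundary--"
    )
    # Expected per the removed implementation: a list of (key, value) tuples.
    assert netlib.utils.multipartdecode(headers, content) == [("field1", "value1")]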

View File

@@ -11,27 +11,12 @@ from netlib.http.semantics import CONTENT_MISSING
 import tutils
 import tservers
-def mock_protocol(data='', chunked=False):
+def mock_protocol(data=''):
     rfile = cStringIO.StringIO(data)
     wfile = cStringIO.StringIO()
     return http1.HTTP1Protocol(rfile=rfile, wfile=wfile)
-# TODO: move test to netlib http1 protocol
-# def test_stripped_chunked_encoding_no_content():
-#     """
-#         https://github.com/mitmproxy/mitmproxy/issues/186
-#     """
-#     r = tutils.tresp(content="")
-#     r.headers["Transfer-Encoding"] = ["chunked"]
-#     assert "Content-Length" in r._assemble_headers()
-#
-#     r = tutils.treq(content="")
-#     r.headers["Transfer-Encoding"] = ["chunked"]
-#     assert "Content-Length" in r._assemble_headers()
-#
 class TestHTTPResponse:
     def test_read_from_stringio(self):
         s = "HTTP/1.1 200 OK\r\n" \
@@ -58,8 +43,6 @@ class TestHTTPResponse:
         )
 class TestHTTPFlow(object):
     def test_repr(self):
         f = tutils.tflow(resp=True, err=True)
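For context, the trimmed helper from the first hunk of this file, with indentation restored: the apparently unused chunked flag is dropped, so tests just hand raw (possibly already chunk-framed) bytes to the HTTP/1 protocol object. The netlib import shown is an assumption; the constructor call is taken verbatim from the hunk.

    import cStringIO
    from netlib.http import http1   # assumed import; the hunk only shows http1.HTTP1Protocol

    def mock_protocol(data=''):
        rfile = cStringIO.StringIO(data)
        wfile = cStringIO.StringIO()
        return http1.HTTP1Protocol(rfile=rfile, wfile=wfile)

    # A chunked body is now simply part of `data`:
    p = mock_protocol("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n0\r\n\r\n")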

View File

@@ -44,25 +44,6 @@ def test_pretty_json():
     assert not utils.pretty_json("moo")
-def test_multipartdecode():
-    boundary = 'somefancyboundary'
-    headers = odict.ODict(
-        [('content-type', ('multipart/form-data; boundary=%s' % boundary))])
-    content = "--{0}\n" \
-              "Content-Disposition: form-data; name=\"field1\"\n\n" \
-              "value1\n" \
-              "--{0}\n" \
-              "Content-Disposition: form-data; name=\"field2\"\n\n" \
-              "value2\n" \
-              "--{0}--".format(boundary)
-    form = utils.multipartdecode(headers, content)
-    assert len(form) == 2
-    assert form[0] == ('field1', 'value1')
-    assert form[1] == ('field2', 'value2')
 def test_pretty_duration():
     assert utils.pretty_duration(0.00001) == "0ms"
     assert utils.pretty_duration(0.0001) == "0ms"
@@ -121,14 +102,5 @@ def test_parse_size():
     tutils.raises(ValueError, utils.parse_size, "ak")
-def test_parse_content_type():
-    p = utils.parse_content_type
-    assert p("text/html") == ("text", "html", {})
-    assert p("text") is None
-    v = p("text/html; charset=UTF-8")
-    assert v == ('text', 'html', {'charset': 'UTF-8'})
 def test_safe_subn():
     assert utils.safe_subn("foo", u"bar", "\xc2foo")
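If the deleted utils tests follow the code into netlib, the parse_content_type assertions would carry over essentially unchanged; only the module path changes (assumed here to be netlib.utils, matching the contentview hunk above). The expected values are copied from the removed libmproxy test.

    import netlib.utils

    def test_parse_content_type():
        p = netlib.utils.parse_content_type
        assert p("text/html") == ("text", "html", {})
        assert p("text") is None
        assert p("text/html; charset=UTF-8") == ("text", "html", {"charset": "UTF-8"})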