Consolidate reading flows from file, use in mitmweb.
commit 1b5f5021dc
parent 4d01e22f26
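In short: the console and dump front-ends each expanded the path, opened the file, wrapped it in flow.FlowReader and translated IOError themselves; this commit moves that into a single FlowMaster.load_flows_file(path) helper and points all three front-ends (console, mitmdump, mitmweb) at it. Below is a minimal sketch of the resulting call path, assuming the libmproxy package at this revision is importable; the FlowMaster(None, flow.State()) wiring mirrors the project's own test style, and the file name is illustrative, not part of the diff.

from __future__ import print_function
from libmproxy import flow

# A FlowMaster with no proxy server attached, as the project's tests do.
master = flow.FlowMaster(None, flow.State())

try:
    # load_flows_file expands "~", opens the file, wraps it in FlowReader,
    # and returns the number of flows loaded.
    count = master.load_flows_file("~/flows.dump")
    print("loaded %d flows" % count)
except flow.FlowReadError as v:
    # The front-ends differ only in how they surface this error:
    # the console UI returns str(v), mitmdump raises DumpError(v),
    # and mitmweb adds an "error" event.
    print("could not read flow file: %s" % v)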
@@ -599,13 +599,20 @@ class ConsoleMaster(flow.FlowMaster):
         self.view_flowlist()
 
-        self.server.start_slave(controller.Slave, controller.Channel(self.masterq, self.should_exit))
+        self.server.start_slave(
+            controller.Slave,
+            controller.Channel(self.masterq, self.should_exit)
+        )
 
         if self.options.rfile:
-            ret = self.load_flows(self.options.rfile)
+            ret = self.load_flows_path(self.options.rfile)
             if ret and self.state.flow_count():
-                self.add_event("File truncated or corrupted. Loaded as many flows as possible.","error")
-            elif not self.state.flow_count():
+                self.add_event(
+                    "File truncated or corrupted. "
+                    "Loaded as many flows as possible.",
+                    "error"
+                )
+            elif ret and not self.state.flow_count():
                 self.shutdown()
                 print >> sys.stderr, "Could not load file:", ret
                 sys.exit(1)
 
@@ -700,23 +707,16 @@ class ConsoleMaster(flow.FlowMaster):
     def load_flows_callback(self, path):
         if not path:
             return
-        ret = self.load_flows(path)
+        ret = self.load_flows_path(path)
         return ret or "Flows loaded from %s"%path
 
-    def load_flows(self, path):
+    def load_flows_path(self, path):
         self.state.last_saveload = path
-        path = os.path.expanduser(path)
-        try:
-            f = file(path, "rb")
-            fr = flow.FlowReader(f)
-        except IOError, v:
-            return v.strerror
         reterr = None
         try:
-            flow.FlowMaster.load_flows(self, fr)
+            flow.FlowMaster.load_flows_file(self, path)
         except flow.FlowReadError, v:
-            reterr = v.strerror
-        f.close()
+            reterr = str(v)
         if self.flow_list_walker:
             self.sync_list_view()
         return reterr
@@ -134,16 +134,11 @@ class DumpMaster(flow.FlowMaster):
                 raise DumpError(err)
 
         if options.rfile:
-            path = os.path.expanduser(options.rfile)
-            try:
-                f = file(path, "rb")
-                freader = flow.FlowReader(f)
-            except IOError, v:
-                raise DumpError(v.strerror)
             try:
-                self.load_flows(freader)
+                self.load_flows_file(options.rfile)
             except flow.FlowReadError, v:
-                self.add_event("Flow file corrupted. Stopped loading.", "error")
+                self.add_event("Flow file corrupted.", "error")
+                raise DumpError(v)
 
         if self.o.app:
            self.start_app(self.o.app_host, self.o.app_port)
@@ -6,6 +6,7 @@ from abc import abstractmethod, ABCMeta
 import hashlib
 import Cookie
 import cookielib
+import os
 import re
 from netlib import odict, wsgi
 import netlib.http
@@ -785,8 +786,20 @@ class FlowMaster(controller.Master):
         """
             Load flows from a FlowReader object.
         """
         cnt = 0
         for i in fr.stream():
             cnt += 1
             self.load_flow(i)
         return cnt
 
+    def load_flows_file(self, path):
+        path = os.path.expanduser(path)
+        try:
+            f = file(path, "rb")
+            freader = FlowReader(f)
+        except IOError, v:
+            raise FlowReadError(v.strerror)
+        return self.load_flows(freader)
+
     def process_new_request(self, f):
         if self.stickycookie_state:
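The hunk above keeps load_flows with its FlowReader argument and layers the new path-based helper on top of it, so callers that already hold an open file object are untouched. A small comparison of the two entry points, under the same assumptions as the sketch above (wiring and file names are illustrative):

from libmproxy import flow

master = flow.FlowMaster(None, flow.State())

# Existing entry point: the caller opens the file and owns the error handling.
with open("flows.dump", "rb") as f:
    master.load_flows(flow.FlowReader(f))

# New entry point added here: takes a path, expands "~", opens the file
# itself, and converts IOError into FlowReadError.
master.load_flows_file("flows.dump")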
@@ -961,7 +974,9 @@ class FlowReader:
                 data = tnetstring.load(self.fo)
                 if tuple(data["version"][:2]) != version.IVERSION[:2]:
                     v = ".".join(str(i) for i in data["version"])
-                    raise FlowReadError("Incompatible serialized data version: %s" % v)
+                    raise FlowReadError(
+                        "Incompatible serialized data version: %s" % v
+                    )
                 off = self.fo.tell()
                 yield handle.protocols[data["type"]]["flow"].from_state(data)
         except ValueError, v:
@@ -2,6 +2,7 @@ from __future__ import absolute_import, print_function
 import collections
 import tornado.ioloop
 import tornado.httpserver
+import os
 from .. import controller, flow
 from . import app
 
@@ -124,6 +125,14 @@ class WebMaster(flow.FlowMaster):
         self.options = options
         super(WebMaster, self).__init__(server, WebState())
         self.app = app.Application(self, self.options.wdebug)
+        if options.rfile:
+            try:
+                print(self.load_flows_file(options.rfile))
+            except flow.FlowReadError, v:
+                self.add_event(
+                    "Could not read flow file: %s"%v,
+                    "error"
+                )
 
     def tick(self):
         flow.FlowMaster.tick(self, self.masterq, timeout=0)
@@ -18,9 +18,12 @@ class RequestHandler(tornado.web.RequestHandler):
         self.set_header("X-Frame-Options", "DENY")
         self.add_header("X-XSS-Protection", "1; mode=block")
         self.add_header("X-Content-Type-Options", "nosniff")
-        self.add_header("Content-Security-Policy", "default-src 'self'; "
-                        "connect-src 'self' ws://* ; "
-                        "style-src 'self' 'unsafe-inline'")
+        self.add_header(
+            "Content-Security-Policy",
+            "default-src 'self'; "
+            "connect-src 'self' ws://* ; "
+            "style-src 'self' 'unsafe-inline'"
+        )
 
     @property
     def state(self):
@@ -99,15 +99,23 @@ class TestDumpMaster:
         with tutils.tmpdir() as t:
             p = os.path.join(t, "read")
             self._flowfile(p)
-            assert "GET" in self._dummy_cycle(0, None, "", flow_detail=1, rfile=p)
+            assert "GET" in self._dummy_cycle(
+                0,
+                None,
+                "",
+                flow_detail=1,
+                rfile=p
+            )
 
             tutils.raises(
                 dump.DumpError, self._dummy_cycle,
                 0, None, "", verbosity=1, rfile="/nonexistent"
             )
+            tutils.raises(
+                dump.DumpError, self._dummy_cycle,
+                0, None, "", verbosity=1, rfile="test_dump.py"
+            )
 
-            # We now just ignore errors
-            self._dummy_cycle(0, None, "", verbosity=1, rfile=tutils.test_data.path("test_dump.py"))
 
     def test_options(self):
         o = dump.Options(verbosity = 2)