#! /usr/bin/env python
"""A variant on webchecker that creates a mirror copy of a remote site."""
__version__ = "$Revision$"
import os
import sys
import string
import urllib
import getopt
import webchecker
# Extract real version number if necessary
if __version__[0] == '$':
    # Under CVS the keyword expands to e.g. "$Revision: 1.17 $" (number
    # illustrative); keep just the middle token
    _v = string.split(__version__)
    if len(_v) == 3:
        __version__ = _v[1]
def main():
    verbose = webchecker.VERBOSE
    try:
        opts, args = getopt.getopt(sys.argv[1:], "qv")
    except getopt.error, msg:
        print msg
        print "usage:", sys.argv[0], "[-qv] ... [rooturl] ..."
        return 2
    # -q: quiet; -v: raise verbosity (may be repeated)
    for o, a in opts:
        if o == "-q":
            verbose = 0
        if o == "-v":
            verbose = verbose + 1
    c = Sucker()
    c.setflags(verbose=verbose)
    # Identify ourselves to the server in the User-agent header
    c.urlopener.addheaders = [
        ('User-agent', 'websucker/%s' % __version__),
        ]
    for arg in args:
        print "Adding root", arg
        c.addroot(arg)
    print "Run..."
    c.run()
class Sucker(webchecker.Checker):

    # Don't check external links; only pages under the given roots are fetched
    checkext = 0

    def readhtml(self, url):
        # Return (text, url) for a page, preferring a previously saved
        # local copy; new pages are fetched and saved.  text is None
        # when the page is not HTML.
        text = None
        path = self.savefilename(url)
        try:
            f = open(path, "rb")
        except IOError:
            f = self.openpage(url)
            if f:
                info = f.info()
                nurl = f.geturl()
                if nurl != url:
                    # Redirected; mirror under the final URL
                    url = nurl
                    path = self.savefilename(url)
                text = f.read()
                f.close()
                self.savefile(text, path)
                if not self.checkforhtml(info, url):
                    text = None
        else:
            # Local copy exists; judge HTML-ness by the URL alone
            if self.checkforhtml({}, url):
                text = f.read()
            f.close()
        return text, url

    def savefile(self, text, path):
        # Write the page to its mirror path, creating directories as needed
        dir, base = os.path.split(path)
        makedirs(dir)
        f = open(path, "wb")
        f.write(text)
        f.close()
        print "saved", path

    def savefilename(self, url):
        # Map a URL to a relative local path: <host>/<path>, with
        # "index.html" added for directory URLs and "/" turned into os.sep
        type, rest = urllib.splittype(url)
        host, path = urllib.splithost(rest)
        while path[:1] == "/": path = path[1:]
        user, host = urllib.splituser(host)
        host, port = urllib.splitnport(host)
        host = string.lower(host)
        path = os.path.join(host, path)
        if path[-1] == "/": path = path + "index.html"
        if os.sep != "/":
            path = string.join(string.split(path, "/"), os.sep)
        return path
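
    # For illustration (hypothetical URL):
    #   "http://www.example.com/a/b/" -> www.example.com/a/b/index.html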
def makedirs(dir):
    # Recursive mkdir: create any missing directories on the way to dir
    if not dir or os.path.exists(dir):
        return
    head, tail = os.path.split(dir)
    if not tail:
        print "Huh? Don't know how to make dir", dir
        return
    makedirs(head)
    os.mkdir(dir, 0777)
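
# For illustration: makedirs("www.example.com/a/b") creates www.example.com,
# www.example.com/a and www.example.com/a/b, skipping any that already exist.
# (Newer Python versions provide os.makedirs() for the same job.)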
if __name__ == '__main__':
    sys.exit(main() or 0)