diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 2ccfbcbffc7..21b9e5295c2 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -36,7 +36,6 @@
 import os
 import sys
-import string
 
 # Import SOCKS module if it exists, else standard socket module socket
 try:
@@ -266,7 +265,7 @@ def sendeprt(self, host, port):
         if af == 0:
             raise error_proto, 'unsupported address family'
         fields = ['', `af`, host, `port`, '']
-        cmd = 'EPRT ' + string.joinfields(fields, '|')
+        cmd = 'EPRT ' + '|'.join(fields)
         return self.voidcmd(cmd)
 
     def makeport(self):
@@ -585,18 +584,18 @@ def parse229(resp, peer):
 
     if resp[:3] <> '229':
         raise error_reply, resp
-    left = string.find(resp, '(')
+    left = resp.find('(')
     if left < 0: raise error_proto, resp
-    right = string.find(resp, ')', left + 1)
+    right = resp.find(')', left + 1)
     if right < 0:
         raise error_proto, resp # should contain '(|||port|)'
    if resp[left + 1] <> resp[right - 1]:
         raise error_proto, resp
-    parts = string.split(resp[left + 1:right], resp[left+1])
+    parts = resp[left + 1:right].split(resp[left+1])
     if len(parts) <> 5:
         raise error_proto, resp
     host = peer[0]
-    port = string.atoi(parts[3])
+    port = int(parts[3])
     return host, port
diff --git a/Lib/markupbase.py b/Lib/markupbase.py
index 57d3ae4b3ce..ae1986958e2 100644
--- a/Lib/markupbase.py
+++ b/Lib/markupbase.py
@@ -38,10 +38,10 @@ def updatepos(self, i, j):
         if i >= j:
             return j
         rawdata = self.rawdata
-        nlines = string.count(rawdata, "\n", i, j)
+        nlines = rawdata.count("\n", i, j)
         if nlines:
             self.lineno = self.lineno + nlines
-            pos = string.rindex(rawdata, "\n", i, j) # Should not fail
+            pos = rawdata.rindex("\n", i, j) # Should not fail
             self.offset = j-(pos+1)
         else:
             self.offset = self.offset + j-i
@@ -176,7 +176,7 @@ def _parse_doctype_element(self, i, declstartpos):
         # style content model; just skip until '>'
         rawdata = self.rawdata
         if '>' in rawdata[j:]:
-            return string.find(rawdata, ">", j) + 1
+            return rawdata.find(">", j) + 1
         return -1
 
     # Internal -- scan past <!ATTLIST declarations
diff --git a/Lib/tabnanny.py b/Lib/tabnanny.py
--- a/Lib/tabnanny.py
+++ b/Lib/tabnanny.py
@@ ... @@ def format_witnesses(w):
     if len(w) > 1:
         prefix = prefix + "s"
-    return prefix + " " + string.join(firsts, ', ')
+    return prefix + " " + ', '.join(firsts)
 
 def process_tokens(tokens):
     INDENT = tokenize.INDENT
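
The pattern applied throughout the patch is the standard Python 2 migration from string-module functions to string methods: string.find(s, sub) becomes s.find(sub), string.count and string.rindex likewise move onto the string object, string.join/joinfields(seq, sep) becomes sep.join(seq), and string.atoi(s) becomes int(s). As an illustration only (not part of the patch; the sample EPSV reply below is hypothetical), the converted parse229 logic runs like this:

    resp = '229 Entering Extended Passive Mode (|||50000|)'  # hypothetical server reply, per RFC 2428 format
    left = resp.find('(')                                    # was string.find(resp, '(')
    right = resp.find(')', left + 1)                         # was string.find(resp, ')', left + 1)
    parts = resp[left + 1:right].split(resp[left + 1])       # was string.split(s, sep); note s.split(sep), not sep.split(s)
    port = int(parts[3])                                     # was string.atoi(parts[3]); yields 50000 here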