""" Test script for the Unicode implementation.

Written by Marc-Andre Lemburg (mal@lemburg.com).

(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.

"""#"
Get rid of the superstitious "~" in dict hashing's "i = (~hash) & mask".
The comment following used to say:
/* We use ~hash instead of hash, as degenerate hash functions, such
as for ints <sigh>, can have lots of leading zeros. It's not
really a performance risk, but better safe than sorry.
12-Dec-00 tim: so ~hash produces lots of leading ones instead --
what's the gain? */
That is, there was never a good reason for doing it. And to the contrary,
as explained on Python-Dev last December, it tended to make the *sum*
(i + incr) & mask (which is the first table index examined in case of
collision) the same "too often" across distinct hashes.
Changing to the simpler "i = hash & mask" reduced the number of string-dict
collisions (== the number of times we go around the lookup for-loop) from about
6 million to 5 million during a full run of the test suite (these are
approximate because the test suite does some random stuff from run to run).
The number of collisions in non-string dicts also decreased, but not as
dramatically.
Note that this may, for a given dict, change the order (wrt previous
releases) of entries exposed by .keys(), .values() and .items(). A number
of std tests suffered bogus failures as a result. For dicts keyed by
small ints, or (less so) by characters, the order is much more likely to be
in increasing order of key now; e.g.,
>>> d = {}
>>> for i in range(10):
... d[i] = i
...
>>> d
{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}
>>>
Unfortunately, people may latch on to that in small examples and draw a
bogus conclusion.
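For illustration only (a rough interactive sketch, not the C code in
dictobject.c; it assumes a table of size 8 and relies on hash(i) == i for
small ints in CPython), here is the first slot probed under the new and
old schemes:
>>> mask = 8 - 1
>>> [hash(k) & mask for k in range(5)]     # new scheme: i = hash & mask
[0, 1, 2, 3, 4]
>>> [(~hash(k)) & mask for k in range(5)]  # old scheme: i = (~hash) & mask
[7, 6, 5, 4, 3]
The new first-probe slot for a small int is the key itself, which is why
such dicts now tend to list their entries in increasing key order.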
test_support.py
Moved test_extcall's sortdict() into test_support, made it stronger,
and imported sortdict into other std tests that needed it.
test_unicode.py
Excluded cp875 from the "roundtrip over range(128)" test, because
cp875 doesn't have a well-defined inverse for unicode("?", "cp875").
See Python-Dev for excruciating details.
Cookie.py
Changed various output functions to sort dicts before building
strings from them.
test_extcall
Fiddled the expected-result file. This remains sensitive to native
dict ordering, because, e.g., if there are multiple errors in a
keyword-arg dict (and test_extcall sets up many cases like that), the
specific error Python complains about first depends on native dict
ordering.
2001-05-13 00:19:31 +00:00
|
|
|
|
from test_support import verify, verbose, TestFailed
|
2000-03-10 23:23:21 +00:00
|
|
|
|
import sys
|
|
|
|
|
|
|
|
|
|
def test(method, input, output, *args):
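    # Apply input.method(*args) and compare the result with `output`, in
    # both value and exact type; if the call raises, the exception type is
    # compared with `output` instead (e.g. test('join', u' ', TypeError, 7)).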
|
|
|
|
|
if verbose:
|
2000-11-29 12:13:59 +00:00
|
|
|
|
print '%s.%s%s =? %s... ' % (repr(input), method, args, repr(output)),
|
2000-03-10 23:23:21 +00:00
|
|
|
|
try:
|
|
|
|
|
f = getattr(input, method)
|
|
|
|
|
value = apply(f, args)
|
|
|
|
|
except:
|
|
|
|
|
value = sys.exc_type
|
2000-04-28 20:39:58 +00:00
|
|
|
|
exc = sys.exc_info()[:2]
|
2000-03-10 23:23:21 +00:00
|
|
|
|
else:
|
|
|
|
|
exc = None
|
2000-11-29 12:13:59 +00:00
|
|
|
|
if value != output or type(value) is not type(output):
|
2000-03-10 23:23:21 +00:00
|
|
|
|
if verbose:
|
|
|
|
|
print 'no'
|
|
|
|
|
print '*',f, `input`, `output`, `value`
|
|
|
|
|
if exc:
|
2000-04-28 20:39:58 +00:00
|
|
|
|
print ' value == %s: %s' % (exc)
|
2000-03-10 23:23:21 +00:00
|
|
|
|
else:
|
|
|
|
|
if verbose:
|
|
|
|
|
print 'yes'
|
|
|
|
|
|
|
|
|
|
test('capitalize', u' hello ', u' hello ')
|
|
|
|
|
test('capitalize', u'hello ', u'Hello ')
|
2001-01-29 11:14:16 +00:00
|
|
|
|
test('capitalize', u'aaaa', u'Aaaa')
|
|
|
|
|
test('capitalize', u'AaAa', u'Aaaa')
|
2000-03-10 23:23:21 +00:00
|
|
|
|
|
2001-01-16 11:54:12 +00:00
|
|
|
|
test('count', u'aaa', 3, u'a')
|
|
|
|
|
test('count', u'aaa', 0, u'b')
|
|
|
|
|
test('count', 'aaa', 3, u'a')
|
|
|
|
|
test('count', 'aaa', 0, u'b')
|
|
|
|
|
test('count', u'aaa', 3, 'a')
|
|
|
|
|
test('count', u'aaa', 0, 'b')
|
|
|
|
|
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('title', u' hello ', u' Hello ')
|
|
|
|
|
test('title', u'hello ', u'Hello ')
|
|
|
|
|
test('title', u"fOrMaT thIs aS titLe String", u'Format This As Title String')
|
|
|
|
|
test('title', u"fOrMaT,thIs-aS*titLe;String", u'Format,This-As*Title;String')
|
|
|
|
|
test('title', u"getInt", u'Getint')
|
|
|
|
|
|
|
|
|
|
test('find', u'abcdefghiabc', 0, u'abc')
|
|
|
|
|
test('find', u'abcdefghiabc', 9, u'abc', 1)
|
|
|
|
|
test('find', u'abcdefghiabc', -1, u'def', 4)
|
|
|
|
|
|
|
|
|
|
test('rfind', u'abcdefghiabc', 9, u'abc')
|
|
|
|
|
|
|
|
|
|
test('lower', u'HeLLo', u'hello')
|
|
|
|
|
test('lower', u'hello', u'hello')
|
|
|
|
|
|
|
|
|
|
test('upper', u'HeLLo', u'HELLO')
|
|
|
|
|
test('upper', u'HELLO', u'HELLO')
|
|
|
|
|
|
|
|
|
|
if 0:
|
|
|
|
|
transtable = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
|
|
|
|
|
|
|
|
|
|
test('maketrans', u'abc', transtable, u'xyz')
|
|
|
|
|
test('maketrans', u'abc', ValueError, u'xyzq')
|
|
|
|
|
|
|
|
|
|
test('split', u'this is the split function',
|
|
|
|
|
[u'this', u'is', u'the', u'split', u'function'])
|
|
|
|
|
test('split', u'a|b|c|d', [u'a', u'b', u'c', u'd'], u'|')
|
|
|
|
|
test('split', u'a|b|c|d', [u'a', u'b', u'c|d'], u'|', 2)
|
|
|
|
|
test('split', u'a b c d', [u'a', u'b c d'], None, 1)
|
|
|
|
|
test('split', u'a b c d', [u'a', u'b', u'c d'], None, 2)
|
|
|
|
|
test('split', u'a b c d', [u'a', u'b', u'c', u'd'], None, 3)
|
|
|
|
|
test('split', u'a b c d', [u'a', u'b', u'c', u'd'], None, 4)
|
|
|
|
|
test('split', u'a b c d', [u'a b c d'], None, 0)
|
|
|
|
|
test('split', u'a b c d', [u'a', u'b', u'c d'], None, 2)
|
|
|
|
|
test('split', u'a b c d ', [u'a', u'b', u'c', u'd'])
|
2000-12-19 02:22:31 +00:00
|
|
|
|
test('split', u'a//b//c//d', [u'a', u'b', u'c', u'd'], u'//')
|
|
|
|
|
test('split', u'a//b//c//d', [u'a', u'b', u'c', u'd'], '//')
|
|
|
|
|
test('split', 'a//b//c//d', [u'a', u'b', u'c', u'd'], u'//')
|
|
|
|
|
test('split', u'endcase test', [u'endcase ', u''], u'test')
|
|
|
|
|
test('split', u'endcase test', [u'endcase ', u''], 'test')
|
|
|
|
|
test('split', 'endcase test', [u'endcase ', u''], u'test')
|
|
|
|
|
|
2000-03-10 23:23:21 +00:00
|
|
|
|
|
|
|
|
|
# join now works with any sequence type
|
|
|
|
|
class Sequence:
|
2000-11-29 12:13:59 +00:00
|
|
|
|
def __init__(self, seq): self.seq = seq
|
2000-03-10 23:23:21 +00:00
|
|
|
|
def __len__(self): return len(self.seq)
|
|
|
|
|
def __getitem__(self, i): return self.seq[i]
|
|
|
|
|
|
|
|
|
|
test('join', u' ', u'a b c d', [u'a', u'b', u'c', u'd'])
|
2000-11-29 12:13:59 +00:00
|
|
|
|
test('join', u' ', u'a b c d', ['a', 'b', u'c', u'd'])
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('join', u'', u'abcd', (u'a', u'b', u'c', u'd'))
|
2000-11-29 12:13:59 +00:00
|
|
|
|
test('join', u' ', u'w x y z', Sequence('wxyz'))
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('join', u' ', TypeError, 7)
|
2000-11-29 12:13:59 +00:00
|
|
|
|
test('join', u' ', TypeError, Sequence([7, u'hello', 123L]))
|
|
|
|
|
test('join', ' ', u'a b c d', [u'a', u'b', u'c', u'd'])
|
|
|
|
|
test('join', ' ', u'a b c d', ['a', 'b', u'c', u'd'])
|
|
|
|
|
test('join', '', u'abcd', (u'a', u'b', u'c', u'd'))
|
|
|
|
|
test('join', ' ', u'w x y z', Sequence(u'wxyz'))
|
|
|
|
|
test('join', ' ', TypeError, 7)
|
2000-03-10 23:23:21 +00:00
|
|
|
|
|
|
|
|
|
result = u''
|
|
|
|
|
for i in range(10):
|
|
|
|
|
if i > 0:
|
|
|
|
|
result = result + u':'
|
|
|
|
|
result = result + u'x'*10
|
|
|
|
|
test('join', u':', result, [u'x' * 10] * 10)
|
|
|
|
|
test('join', u':', result, (u'x' * 10,) * 10)
|
|
|
|
|
|
|
|
|
|
test('strip', u' hello ', u'hello')
|
|
|
|
|
test('lstrip', u' hello ', u'hello ')
|
|
|
|
|
test('rstrip', u' hello ', u' hello')
|
|
|
|
|
test('strip', u'hello', u'hello')
|
|
|
|
|
|
|
|
|
|
test('swapcase', u'HeLLo cOmpUteRs', u'hEllO CoMPuTErS')
|
|
|
|
|
|
|
|
|
|
if 0:
|
|
|
|
|
test('translate', u'xyzabcdef', u'xyzxyz', transtable, u'def')
|
|
|
|
|
|
|
|
|
|
table = string.maketrans('a', u'A')
|
|
|
|
|
test('translate', u'abc', u'Abc', table)
|
|
|
|
|
test('translate', u'xyz', u'xyz', table)
|
|
|
|
|
|
|
|
|
|
test('replace', u'one!two!three!', u'one@two!three!', u'!', u'@', 1)
|
2000-03-20 16:36:48 +00:00
|
|
|
|
test('replace', u'one!two!three!', u'onetwothree', '!', '')
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('replace', u'one!two!three!', u'one@two@three!', u'!', u'@', 2)
|
|
|
|
|
test('replace', u'one!two!three!', u'one@two@three@', u'!', u'@', 3)
|
|
|
|
|
test('replace', u'one!two!three!', u'one@two@three@', u'!', u'@', 4)
|
|
|
|
|
test('replace', u'one!two!three!', u'one!two!three!', u'!', u'@', 0)
|
|
|
|
|
test('replace', u'one!two!three!', u'one@two@three@', u'!', u'@')
|
|
|
|
|
test('replace', u'one!two!three!', u'one!two!three!', u'x', u'@')
|
|
|
|
|
test('replace', u'one!two!three!', u'one!two!three!', u'x', u'@', 2)
|
|
|
|
|
|
|
|
|
|
test('startswith', u'hello', 1, u'he')
|
|
|
|
|
test('startswith', u'hello', 1, u'hello')
|
|
|
|
|
test('startswith', u'hello', 0, u'hello world')
|
|
|
|
|
test('startswith', u'hello', 1, u'')
|
|
|
|
|
test('startswith', u'hello', 0, u'ello')
|
|
|
|
|
test('startswith', u'hello', 1, u'ello', 1)
|
|
|
|
|
test('startswith', u'hello', 1, u'o', 4)
|
|
|
|
|
test('startswith', u'hello', 0, u'o', 5)
|
|
|
|
|
test('startswith', u'hello', 1, u'', 5)
|
|
|
|
|
test('startswith', u'hello', 0, u'lo', 6)
|
|
|
|
|
test('startswith', u'helloworld', 1, u'lowo', 3)
|
|
|
|
|
test('startswith', u'helloworld', 1, u'lowo', 3, 7)
|
|
|
|
|
test('startswith', u'helloworld', 0, u'lowo', 3, 6)
|
|
|
|
|
|
|
|
|
|
test('endswith', u'hello', 1, u'lo')
|
|
|
|
|
test('endswith', u'hello', 0, u'he')
|
|
|
|
|
test('endswith', u'hello', 1, u'')
|
|
|
|
|
test('endswith', u'hello', 0, u'hello world')
|
|
|
|
|
test('endswith', u'helloworld', 0, u'worl')
|
|
|
|
|
test('endswith', u'helloworld', 1, u'worl', 3, 9)
|
|
|
|
|
test('endswith', u'helloworld', 1, u'world', 3, 12)
|
|
|
|
|
test('endswith', u'helloworld', 1, u'lowo', 1, 7)
|
|
|
|
|
test('endswith', u'helloworld', 1, u'lowo', 2, 7)
|
|
|
|
|
test('endswith', u'helloworld', 1, u'lowo', 3, 7)
|
|
|
|
|
test('endswith', u'helloworld', 0, u'lowo', 4, 7)
|
|
|
|
|
test('endswith', u'helloworld', 0, u'lowo', 3, 8)
|
|
|
|
|
test('endswith', u'ab', 0, u'ab', 0, 1)
|
|
|
|
|
test('endswith', u'ab', 0, u'ab', 0, 0)
|
|
|
|
|
|
|
|
|
|
test('expandtabs', u'abc\rab\tdef\ng\thi', u'abc\rab def\ng hi')
|
|
|
|
|
test('expandtabs', u'abc\rab\tdef\ng\thi', u'abc\rab def\ng hi', 8)
|
|
|
|
|
test('expandtabs', u'abc\rab\tdef\ng\thi', u'abc\rab def\ng hi', 4)
|
|
|
|
|
test('expandtabs', u'abc\r\nab\tdef\ng\thi', u'abc\r\nab def\ng hi', 4)
|
|
|
|
|
|
|
|
|
|
if 0:
|
|
|
|
|
test('capwords', u'abc def ghi', u'Abc Def Ghi')
|
|
|
|
|
test('capwords', u'abc\tdef\nghi', u'Abc Def Ghi')
|
|
|
|
|
test('capwords', u'abc\t def \nghi', u'Abc Def Ghi')
|
|
|
|
|
|
|
|
|
|
# Comparisons:
|
|
|
|
|
print 'Testing Unicode comparisons...',
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'abc' == 'abc')
|
|
|
|
|
verify('abc' == u'abc')
|
|
|
|
|
verify(u'abc' == u'abc')
|
|
|
|
|
verify(u'abcd' > 'abc')
|
|
|
|
|
verify('abcd' > u'abc')
|
|
|
|
|
verify(u'abcd' > u'abc')
|
|
|
|
|
verify(u'abc' < 'abcd')
|
|
|
|
|
verify('abc' < u'abcd')
|
|
|
|
|
verify(u'abc' < u'abcd')
|
2000-03-10 23:23:21 +00:00
|
|
|
|
print 'done.'
|
|
|
|
|
|
2000-08-08 08:04:29 +00:00
|
|
|
|
if 0:
|
|
|
|
|
# Move these tests to a Unicode collation module test...
|
|
|
|
|
|
|
|
|
|
print 'Testing UTF-16 code point order comparisons...',
|
|
|
|
|
#No surrogates, no fixup required.
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'\u0061' < u'\u20ac')
|
2000-08-08 08:04:29 +00:00
|
|
|
|
# Non surrogate below surrogate value, no fixup required
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'\u0061' < u'\ud800\udc02')
|
2000-08-08 08:04:29 +00:00
|
|
|
|
|
|
|
|
|
# Non surrogate above surrogate value, fixup required
|
|
|
|
|
def test_lecmp(s, s2):
|
2001-01-18 02:22:22 +00:00
|
|
|
|
verify(s < s2 , "comparison failed on %s < %s" % (s, s2))
|
2000-08-08 08:04:29 +00:00
|
|
|
|
|
|
|
|
|
def test_fixup(s):
|
2000-10-23 17:22:08 +00:00
|
|
|
|
s2 = u'\ud800\udc01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud900\udc01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\uda00\udc01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\udb00\udc01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud800\udd01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud900\udd01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\uda00\udd01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\udb00\udd01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud800\ude01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud900\ude01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\uda00\ude01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\udb00\ude01'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud800\udfff'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\ud900\udfff'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\uda00\udfff'
|
|
|
|
|
test_lecmp(s, s2)
|
|
|
|
|
s2 = u'\udb00\udfff'
|
|
|
|
|
test_lecmp(s, s2)
|
2000-08-08 08:04:29 +00:00
|
|
|
|
|
|
|
|
|
test_fixup(u'\ue000')
|
|
|
|
|
test_fixup(u'\uff61')
|
|
|
|
|
|
|
|
|
|
# Surrogates on both sides, no fixup required
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'\ud800\udc02' < u'\ud84d\udc56')
|
2000-08-08 08:04:29 +00:00
|
|
|
|
print 'done.'
|
2000-07-07 17:48:52 +00:00
|
|
|
|
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('ljust', u'abc', u'abc ', 10)
|
|
|
|
|
test('rjust', u'abc', u' abc', 10)
|
|
|
|
|
test('center', u'abc', u' abc ', 10)
|
|
|
|
|
test('ljust', u'abc', u'abc ', 6)
|
|
|
|
|
test('rjust', u'abc', u' abc', 6)
|
|
|
|
|
test('center', u'abc', u' abc ', 6)
|
|
|
|
|
test('ljust', u'abc', u'abc', 2)
|
|
|
|
|
test('rjust', u'abc', u'abc', 2)
|
|
|
|
|
test('center', u'abc', u'abc', 2)
|
|
|
|
|
|
|
|
|
|
test('islower', u'a', 1)
|
|
|
|
|
test('islower', u'A', 0)
|
|
|
|
|
test('islower', u'\n', 0)
|
|
|
|
|
test('islower', u'\u1FFc', 0)
|
|
|
|
|
test('islower', u'abc', 1)
|
|
|
|
|
test('islower', u'aBc', 0)
|
|
|
|
|
test('islower', u'abc\n', 1)
|
|
|
|
|
|
|
|
|
|
test('isupper', u'a', 0)
|
|
|
|
|
test('isupper', u'A', 1)
|
|
|
|
|
test('isupper', u'\n', 0)
|
2001-02-10 14:09:31 +00:00
|
|
|
|
if sys.platform[:4] != 'java':
|
|
|
|
|
test('isupper', u'\u1FFc', 0)
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('isupper', u'ABC', 1)
|
|
|
|
|
test('isupper', u'AbC', 0)
|
|
|
|
|
test('isupper', u'ABC\n', 1)
|
|
|
|
|
|
|
|
|
|
test('istitle', u'a', 0)
|
|
|
|
|
test('istitle', u'A', 1)
|
|
|
|
|
test('istitle', u'\n', 0)
|
|
|
|
|
test('istitle', u'\u1FFc', 1)
|
|
|
|
|
test('istitle', u'A Titlecased Line', 1)
|
|
|
|
|
test('istitle', u'A\nTitlecased Line', 1)
|
|
|
|
|
test('istitle', u'A Titlecased, Line', 1)
|
|
|
|
|
test('istitle', u'Greek \u1FFcitlecases ...', 1)
|
|
|
|
|
test('istitle', u'Not a capitalized String', 0)
|
|
|
|
|
test('istitle', u'Not\ta Titlecase String', 0)
|
|
|
|
|
test('istitle', u'Not--a Titlecase String', 0)
|
|
|
|
|
|
2000-07-05 09:46:40 +00:00
|
|
|
|
test('isalpha', u'a', 1)
|
|
|
|
|
test('isalpha', u'A', 1)
|
|
|
|
|
test('isalpha', u'\n', 0)
|
|
|
|
|
test('isalpha', u'\u1FFc', 1)
|
|
|
|
|
test('isalpha', u'abc', 1)
|
|
|
|
|
test('isalpha', u'aBc123', 0)
|
|
|
|
|
test('isalpha', u'abc\n', 0)
|
|
|
|
|
|
|
|
|
|
test('isalnum', u'a', 1)
|
|
|
|
|
test('isalnum', u'A', 1)
|
|
|
|
|
test('isalnum', u'\n', 0)
|
|
|
|
|
test('isalnum', u'123abc456', 1)
|
|
|
|
|
test('isalnum', u'a1b3c', 1)
|
|
|
|
|
test('isalnum', u'aBc000 ', 0)
|
|
|
|
|
test('isalnum', u'abc\n', 0)
|
|
|
|
|
|
2000-03-10 23:23:21 +00:00
|
|
|
|
test('splitlines', u"abc\ndef\n\rghi", [u'abc', u'def', u'', u'ghi'])
|
|
|
|
|
test('splitlines', u"abc\ndef\n\r\nghi", [u'abc', u'def', u'', u'ghi'])
|
|
|
|
|
test('splitlines', u"abc\ndef\r\nghi", [u'abc', u'def', u'ghi'])
|
|
|
|
|
test('splitlines', u"abc\ndef\r\nghi\n", [u'abc', u'def', u'ghi'])
|
|
|
|
|
test('splitlines', u"abc\ndef\r\nghi\n\r", [u'abc', u'def', u'ghi', u''])
|
|
|
|
|
test('splitlines', u"\nabc\ndef\r\nghi\n\r", [u'', u'abc', u'def', u'ghi', u''])
|
2000-04-11 15:37:02 +00:00
|
|
|
|
test('splitlines', u"\nabc\ndef\r\nghi\n\r", [u'\n', u'abc\n', u'def\r\n', u'ghi\n', u'\r'], 1)
|
2000-03-10 23:23:21 +00:00
|
|
|
|
|
|
|
|
|
test('translate', u"abababc", u'bbbc', {ord('a'):None})
|
|
|
|
|
test('translate', u"abababc", u'iiic', {ord('a'):None, ord('b'):ord('i')})
|
|
|
|
|
test('translate', u"abababc", u'iiix', {ord('a'):None, ord('b'):ord('i'), ord('c'):u'x'})
|
|
|
|
|
|
2000-03-13 23:21:48 +00:00
|
|
|
|
# Contains:
|
|
|
|
|
print 'Testing Unicode contains method...',
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(('a' in u'abdb') == 1)
|
|
|
|
|
verify(('a' in u'bdab') == 1)
|
|
|
|
|
verify(('a' in u'bdaba') == 1)
|
|
|
|
|
verify(('a' in u'bdba') == 1)
|
|
|
|
|
verify(('a' in u'bdba') == 1)
|
|
|
|
|
verify((u'a' in u'bdba') == 1)
|
|
|
|
|
verify((u'a' in u'bdb') == 0)
|
|
|
|
|
verify((u'a' in 'bdb') == 0)
|
|
|
|
|
verify((u'a' in 'bdba') == 1)
|
|
|
|
|
verify((u'a' in ('a',1,None)) == 1)
|
|
|
|
|
verify((u'a' in (1,None,'a')) == 1)
|
|
|
|
|
verify((u'a' in (1,None,u'a')) == 1)
|
|
|
|
|
verify(('a' in ('a',1,None)) == 1)
|
|
|
|
|
verify(('a' in (1,None,'a')) == 1)
|
|
|
|
|
verify(('a' in (1,None,u'a')) == 1)
|
|
|
|
|
verify(('a' in ('x',1,u'y')) == 0)
|
|
|
|
|
verify(('a' in ('x',1,None)) == 0)
|
2000-03-13 23:21:48 +00:00
|
|
|
|
print 'done.'
|
|
|
|
|
|
2000-03-10 23:23:21 +00:00
|
|
|
|
# Formatting:
|
|
|
|
|
print 'Testing Unicode formatting strings...',
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u"%s, %s" % (u"abc", "abc") == u'abc, abc')
|
|
|
|
|
verify(u"%s, %s, %i, %f, %5.2f" % (u"abc", "abc", 1, 2, 3) == u'abc, abc, 1, 2.000000, 3.00')
|
|
|
|
|
verify(u"%s, %s, %i, %f, %5.2f" % (u"abc", "abc", 1, -2, 3) == u'abc, abc, 1, -2.000000, 3.00')
|
|
|
|
|
verify(u"%s, %s, %i, %f, %5.2f" % (u"abc", "abc", -1, -2, 3.5) == u'abc, abc, -1, -2.000000, 3.50')
|
|
|
|
|
verify(u"%s, %s, %i, %f, %5.2f" % (u"abc", "abc", -1, -2, 3.57) == u'abc, abc, -1, -2.000000, 3.57')
|
|
|
|
|
verify(u"%s, %s, %i, %f, %5.2f" % (u"abc", "abc", -1, -2, 1003.57) == u'abc, abc, -1, -2.000000, 1003.57')
|
|
|
|
|
verify(u"%c" % (u"a",) == u'a')
|
|
|
|
|
verify(u"%c" % ("a",) == u'a')
|
|
|
|
|
verify(u"%c" % (34,) == u'"')
|
|
|
|
|
verify(u"%c" % (36,) == u'$')
|
2001-02-10 14:09:31 +00:00
|
|
|
|
if sys.platform[:4] != 'java':
|
|
|
|
|
value = u"%r, %r" % (u"abc", "abc")
|
|
|
|
|
if value != u"u'abc', 'abc'":
|
|
|
|
|
print '*** formatting failed for "%s"' % 'u"%r, %r" % (u"abc", "abc")'
|
2000-06-13 12:05:36 +00:00
|
|
|
|
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u"%(x)s, %(y)s" % {'x':u"abc", 'y':"def"} == u'abc, def')
|
2000-06-13 12:05:36 +00:00
|
|
|
|
try:
|
2001-02-10 14:09:31 +00:00
|
|
|
|
if sys.platform[:4] != 'java':
|
|
|
|
|
        value = u"%(x)s, %(ä)s" % {'x':u"abc", u'ä'.encode('utf-8'):"def"}
|
|
|
|
|
else:
|
|
|
|
|
        value = u"%(x)s, %(ä)s" % {'x':u"abc", u'ä':"def"}
|
2000-06-13 12:05:36 +00:00
|
|
|
|
except KeyError:
|
|
|
|
|
print '*** formatting failed for "%s"' % "u'abc, def'"
|
|
|
|
|
else:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(value == u'abc, def')
|
2000-06-13 12:05:36 +00:00
|
|
|
|
|
2000-04-10 13:52:48 +00:00
|
|
|
|
# formatting jobs delegated from the string implementation:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify('...%(foo)s...' % {'foo':u"abc"} == u'...abc...')
|
|
|
|
|
verify('...%(foo)s...' % {'foo':"abc"} == '...abc...')
|
|
|
|
|
verify('...%(foo)s...' % {u'foo':"abc"} == '...abc...')
|
|
|
|
|
verify('...%(foo)s...' % {u'foo':u"abc"} == u'...abc...')
|
|
|
|
|
verify('...%(foo)s...' % {u'foo':u"abc",'def':123} == u'...abc...')
|
|
|
|
|
verify('...%(foo)s...' % {u'foo':u"abc",u'def':123} == u'...abc...')
|
|
|
|
|
verify('...%s...%s...%s...%s...' % (1,2,3,u"abc") == u'...1...2...3...abc...')
|
|
|
|
|
verify('...%%...%%s...%s...%s...%s...%s...' % (1,2,3,u"abc") == u'...%...%s...1...2...3...abc...')
|
|
|
|
|
verify('...%s...' % u"abc" == u'...abc...')
|
2001-05-02 14:21:53 +00:00
|
|
|
|
verify('%*s' % (5,u'abc',) == u' abc')
|
|
|
|
|
verify('%*s' % (-5,u'abc',) == u'abc ')
|
|
|
|
|
verify('%*.*s' % (5,2,u'abc',) == u' ab')
|
|
|
|
|
verify('%*.*s' % (5,3,u'abc',) == u' abc')
|
|
|
|
|
verify('%i %*.*s' % (10, 5,3,u'abc',) == u'10 abc')
|
|
|
|
|
verify('%i%s %*.*s' % (10, 3, 5,3,u'abc',) == u'103 abc')
|
2000-03-10 23:23:21 +00:00
|
|
|
|
print 'done.'
|
|
|
|
|
|
2000-03-24 22:14:19 +00:00
|
|
|
|
# Test builtin codecs
|
|
|
|
|
print 'Testing builtin codecs...',
|
|
|
|
|
|
2000-07-07 17:48:52 +00:00
|
|
|
|
# UTF-8 specific encoding tests:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'\u20ac'.encode('utf-8') == \
|
|
|
|
|
''.join((chr(0xe2), chr(0x82), chr(0xac))) )
|
|
|
|
|
verify(u'\ud800\udc02'.encode('utf-8') == \
|
|
|
|
|
''.join((chr(0xf0), chr(0x90), chr(0x80), chr(0x82))) )
|
|
|
|
|
verify(u'\ud84d\udc56'.encode('utf-8') == \
|
|
|
|
|
''.join((chr(0xf0), chr(0xa3), chr(0x91), chr(0x96))) )
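# The expected bytes above follow from the UTF-8 bit layout; as a small
# illustrative cross-check (an addition for this write-up, not part of the
# original test), derive the 3-byte form of U+20AC by hand:
c = 0x20ac
verify(chr(0xe0 | (c >> 12)) +
       chr(0x80 | ((c >> 6) & 0x3f)) +
       chr(0x80 | (c & 0x3f)) == u'\u20ac'.encode('utf-8'))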
|
2000-07-07 17:48:52 +00:00
|
|
|
|
# UTF-8 specific decoding tests
|
2001-01-18 02:22:22 +00:00
|
|
|
|
verify(unicode(''.join((chr(0xf0), chr(0xa3), chr(0x91), chr(0x96))),
|
2001-01-17 19:11:13 +00:00
|
|
|
|
'utf-8') == u'\ud84d\udc56' )
|
2001-01-18 02:22:22 +00:00
|
|
|
|
verify(unicode(''.join((chr(0xf0), chr(0x90), chr(0x80), chr(0x82))),
|
2001-01-17 19:11:13 +00:00
|
|
|
|
'utf-8') == u'\ud800\udc02' )
|
2001-01-18 02:22:22 +00:00
|
|
|
|
verify(unicode(''.join((chr(0xe2), chr(0x82), chr(0xac))),
|
2001-01-17 19:11:13 +00:00
|
|
|
|
'utf-8') == u'\u20ac' )
|
2000-07-07 17:48:52 +00:00
|
|
|
|
|
|
|
|
|
# Other possible utf-8 test cases:
|
|
|
|
|
# * strict decoding testing for all of the
|
|
|
|
|
# UTF8_ERROR cases in PyUnicode_DecodeUTF8
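# A minimal sketch of such strict-mode checks (added here for illustration;
# the byte strings are assumptions chosen as clearly malformed UTF-8 --
# a lone continuation byte and a truncated multi-byte sequence):
for bad in ('\x80', '\xe2\x82'):
    try:
        unicode(bad, 'utf-8', 'strict')
    except UnicodeError:
        pass
    else:
        print '*** decoding %r should have raised UnicodeError' % bad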
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode('hello','ascii') == u'hello')
|
|
|
|
|
verify(unicode('hello','utf-8') == u'hello')
|
|
|
|
|
verify(unicode('hello','utf8') == u'hello')
|
|
|
|
|
verify(unicode('hello','latin-1') == u'hello')
|
2000-03-24 22:14:19 +00:00
|
|
|
|
|
2000-07-07 13:46:19 +00:00
|
|
|
|
class String:
|
|
|
|
|
x = ''
|
|
|
|
|
def __str__(self):
|
|
|
|
|
return self.x
|
|
|
|
|
|
|
|
|
|
o = String()
|
|
|
|
|
|
|
|
|
|
o.x = 'abc'
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(o) == u'abc')
|
|
|
|
|
verify(str(o) == 'abc')
|
2000-07-07 13:46:19 +00:00
|
|
|
|
|
|
|
|
|
o.x = u'abc'
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(o) == u'abc')
|
|
|
|
|
verify(str(o) == 'abc')
|
2000-07-07 13:46:19 +00:00
|
|
|
|
|
2000-04-10 13:52:48 +00:00
|
|
|
|
try:
|
|
|
|
|
u'Andr\202 x'.encode('ascii')
|
|
|
|
|
u'Andr\202 x'.encode('ascii','strict')
|
|
|
|
|
except ValueError:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
2001-01-19 19:01:56 +00:00
|
|
|
|
raise TestFailed, "u'Andr\202'.encode('ascii') failed to raise an exception"
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'Andr\202 x'.encode('ascii','ignore') == "Andr x")
|
|
|
|
|
verify(u'Andr\202 x'.encode('ascii','replace') == "Andr? x")
|
2000-04-10 13:52:48 +00:00
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
unicode('Andr\202 x','ascii')
|
|
|
|
|
unicode('Andr\202 x','ascii','strict')
|
|
|
|
|
except ValueError:
|
|
|
|
|
pass
|
|
|
|
|
else:
|
2001-01-19 19:01:56 +00:00
|
|
|
|
raise TestFailed, "unicode('Andr\202') failed to raise an exception"
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode('Andr\202 x','ascii','ignore') == u"Andr x")
|
|
|
|
|
verify(unicode('Andr\202 x','ascii','replace') == u'Andr\uFFFD x')
|
2000-04-10 13:52:48 +00:00
|
|
|
|
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(u'hello'.encode('ascii') == 'hello')
|
|
|
|
|
verify(u'hello'.encode('utf-8') == 'hello')
|
|
|
|
|
verify(u'hello'.encode('utf8') == 'hello')
|
|
|
|
|
verify(u'hello'.encode('utf-16-le') == 'h\000e\000l\000l\000o\000')
|
|
|
|
|
verify(u'hello'.encode('utf-16-be') == '\000h\000e\000l\000l\000o')
|
|
|
|
|
verify(u'hello'.encode('latin-1') == 'hello')
|
2000-03-24 22:14:19 +00:00
|
|
|
|
|
|
|
|
|
u = u''.join(map(unichr, range(1024)))
|
|
|
|
|
for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
|
|
|
|
|
'raw_unicode_escape', 'unicode_escape', 'unicode_internal'):
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(u.encode(encoding),encoding) == u)
|
2000-03-24 22:14:19 +00:00
|
|
|
|
|
|
|
|
|
u = u''.join(map(unichr, range(256)))
|
2000-04-05 20:11:21 +00:00
|
|
|
|
for encoding in (
|
|
|
|
|
'latin-1',
|
|
|
|
|
):
|
|
|
|
|
try:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(u.encode(encoding),encoding) == u)
|
2001-01-19 19:01:56 +00:00
|
|
|
|
except TestFailed:
|
2000-04-05 20:11:21 +00:00
|
|
|
|
print '*** codec "%s" failed round-trip' % encoding
|
|
|
|
|
except ValueError,why:
|
|
|
|
|
print '*** codec for "%s" failed: %s' % (encoding, why)
|
2000-03-24 22:14:19 +00:00
|
|
|
|
|
|
|
|
|
u = u''.join(map(unichr, range(128)))
|
2000-04-05 20:11:21 +00:00
|
|
|
|
for encoding in (
|
|
|
|
|
'ascii',
|
|
|
|
|
):
|
|
|
|
|
try:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(u.encode(encoding),encoding) == u)
|
2001-01-19 19:01:56 +00:00
|
|
|
|
except TestFailed:
|
2000-04-05 20:11:21 +00:00
|
|
|
|
print '*** codec "%s" failed round-trip' % encoding
|
|
|
|
|
except ValueError,why:
|
|
|
|
|
print '*** codec for "%s" failed: %s' % (encoding, why)
|
|
|
|
|
|
|
|
|
|
print 'done.'
|
|
|
|
|
|
|
|
|
|
print 'Testing standard mapping codecs...',
|
|
|
|
|
|
|
|
|
|
print '0-127...',
|
|
|
|
|
s = ''.join(map(chr, range(128)))
|
|
|
|
|
for encoding in (
|
|
|
|
|
'cp037', 'cp1026',
|
|
|
|
|
'cp437', 'cp500', 'cp737', 'cp775', 'cp850',
|
|
|
|
|
'cp852', 'cp855', 'cp860', 'cp861', 'cp862',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
'cp863', 'cp865', 'cp866',
|
2000-04-05 20:11:21 +00:00
|
|
|
|
'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
|
|
|
|
|
'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5', 'iso8859_6',
|
|
|
|
|
'iso8859_7', 'iso8859_9', 'koi8_r', 'latin_1',
|
|
|
|
|
'mac_cyrillic', 'mac_latin2',
|
|
|
|
|
|
|
|
|
|
'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
|
|
|
|
|
'cp1256', 'cp1257', 'cp1258',
|
|
|
|
|
'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
|
|
|
|
|
|
|
|
|
|
'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
|
|
|
|
|
'cp1006', 'iso8859_8',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
|
2000-04-05 20:11:21 +00:00
|
|
|
|
### These have undefined mappings:
|
|
|
|
|
#'cp424',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
|
|
|
|
|
### These fail the round-trip:
|
|
|
|
|
#'cp875'
|
|
|
|
|
|
2000-04-05 20:11:21 +00:00
|
|
|
|
):
|
|
|
|
|
try:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(s,encoding).encode(encoding) == s)
|
2001-01-19 19:01:56 +00:00
|
|
|
|
except TestFailed:
|
2000-04-05 20:11:21 +00:00
|
|
|
|
print '*** codec "%s" failed round-trip' % encoding
|
|
|
|
|
except ValueError,why:
|
|
|
|
|
print '*** codec for "%s" failed: %s' % (encoding, why)
|
|
|
|
|
|
|
|
|
|
print '128-255...',
|
|
|
|
|
s = ''.join(map(chr, range(128,256)))
|
|
|
|
|
for encoding in (
|
|
|
|
|
'cp037', 'cp1026',
|
|
|
|
|
'cp437', 'cp500', 'cp737', 'cp775', 'cp850',
|
|
|
|
|
'cp852', 'cp855', 'cp860', 'cp861', 'cp862',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
'cp863', 'cp865', 'cp866',
|
2000-04-05 20:11:21 +00:00
|
|
|
|
'iso8859_10', 'iso8859_13', 'iso8859_14', 'iso8859_15',
|
2001-01-18 02:22:22 +00:00
|
|
|
|
'iso8859_2', 'iso8859_4', 'iso8859_5',
|
2001-01-03 21:29:14 +00:00
|
|
|
|
'iso8859_9', 'koi8_r', 'latin_1',
|
2000-04-05 20:11:21 +00:00
|
|
|
|
'mac_cyrillic', 'mac_latin2',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
|
2000-04-05 20:11:21 +00:00
|
|
|
|
### These have undefined mappings:
|
|
|
|
|
#'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255',
|
|
|
|
|
#'cp1256', 'cp1257', 'cp1258',
|
|
|
|
|
#'cp424', 'cp856', 'cp857', 'cp864', 'cp869', 'cp874',
|
2001-01-18 02:22:22 +00:00
|
|
|
|
#'iso8859_3', 'iso8859_6', 'iso8859_7',
|
2000-04-05 20:11:21 +00:00
|
|
|
|
#'mac_greek', 'mac_iceland','mac_roman', 'mac_turkish',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
|
2000-04-05 20:11:21 +00:00
|
|
|
|
### These fail the round-trip:
|
|
|
|
|
#'cp1006', 'cp875', 'iso8859_8',
|
2000-10-23 17:22:08 +00:00
|
|
|
|
|
2000-04-05 20:11:21 +00:00
|
|
|
|
):
|
|
|
|
|
try:
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify(unicode(s,encoding).encode(encoding) == s)
|
2001-01-19 19:01:56 +00:00
|
|
|
|
except TestFailed:
|
2000-04-05 20:11:21 +00:00
|
|
|
|
print '*** codec "%s" failed round-trip' % encoding
|
|
|
|
|
except ValueError,why:
|
|
|
|
|
print '*** codec for "%s" failed: %s' % (encoding, why)
|
2000-03-24 22:14:19 +00:00
|
|
|
|
|
|
|
|
|
print 'done.'
|
2000-04-13 14:11:56 +00:00
|
|
|
|
|
|
|
|
|
print 'Testing Unicode string concatenation...',
|
2001-01-17 19:11:13 +00:00
|
|
|
|
verify((u"abc" u"def") == u"abcdef")
|
|
|
|
|
verify(("abc" u"def") == u"abcdef")
|
|
|
|
|
verify((u"abc" "def") == u"abcdef")
|
|
|
|
|
verify((u"abc" u"def" "ghi") == u"abcdefghi")
|
|
|
|
|
verify(("abc" "def" u"ghi") == u"abcdefghi")
|
2000-04-13 14:11:56 +00:00
|
|
|
|
print 'done.'
|