Diffstat (limited to 'Lib/test')
-rw-r--r--  Lib/test/_test_multiprocessing.py          20
-rw-r--r--  Lib/test/pickletester.py                  433
-rw-r--r--  Lib/test/test__locale.py                   99
-rw-r--r--  Lib/test/test_asyncio/test_queues.py       10
-rw-r--r--  Lib/test/test_asyncio/test_subprocess.py   64
-rw-r--r--  Lib/test/test_asyncio/test_unix_events.py   6
-rw-r--r--  Lib/test/test_bz2.py                       10
-rw-r--r--  Lib/test/test_cgi.py                        4
-rw-r--r--  Lib/test/test_dbm_dumb.py                   9
-rw-r--r--  Lib/test/test_functools.py                 16
-rw-r--r--  Lib/test/test_gdb.py                        2
-rw-r--r--  Lib/test/test_httplib.py                   15
-rw-r--r--  Lib/test/test_httpservers.py               53
-rw-r--r--  Lib/test/test_io.py                        53
-rw-r--r--  Lib/test/test_locale.py                     2
-rw-r--r--  Lib/test/test_multibytecodec.py             7
-rw-r--r--  Lib/test/test_pathlib.py                   36
-rw-r--r--  Lib/test/test_pydoc.py                     41
-rw-r--r--  Lib/test/test_re.py                        33
-rw-r--r--  Lib/test/test_statistics.py                 1
-rw-r--r--  Lib/test/test_subprocess.py                 4
-rw-r--r--  Lib/test/test_tarfile.py                   15
-rw-r--r--  Lib/test/test_zipfile.py                   10
23 files changed, 636 insertions, 307 deletions
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 2d4395e7cd..9466d4ebd6 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -2020,6 +2020,12 @@ SERIALIZER = 'xmlrpclib'
class _TestRemoteManager(BaseTestCase):
ALLOWED_TYPES = ('manager',)
+ values = ['hello world', None, True, 2.25,
+ 'hall\xe5 v\xe4rlden',
+ '\u043f\u0440\u0438\u0432\u0456\u0442 \u0441\u0432\u0456\u0442',
+ b'hall\xe5 v\xe4rlden',
+ ]
+ result = values[:]
@classmethod
def _putter(cls, address, authkey):
@@ -2028,7 +2034,8 @@ class _TestRemoteManager(BaseTestCase):
)
manager.connect()
queue = manager.get_queue()
- queue.put(('hello world', None, True, 2.25))
+ # Note that xmlrpclib will deserialize object as a list not a tuple
+ queue.put(tuple(cls.values))
def test_remote(self):
authkey = os.urandom(32)
@@ -2048,8 +2055,7 @@ class _TestRemoteManager(BaseTestCase):
manager2.connect()
queue = manager2.get_queue()
- # Note that xmlrpclib will deserialize object as a list not a tuple
- self.assertEqual(queue.get(), ['hello world', None, True, 2.25])
+ self.assertEqual(queue.get(), self.result)
# Because we are using xmlrpclib for serialization instead of
# pickle this will cause a serialization error.
@@ -3405,12 +3411,12 @@ class TestNoForkBomb(unittest.TestCase):
name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py')
if sm != 'fork':
rc, out, err = test.script_helper.assert_python_failure(name, sm)
- self.assertEqual('', out.decode('ascii'))
- self.assertIn('RuntimeError', err.decode('ascii'))
+ self.assertEqual(out, b'')
+ self.assertIn(b'RuntimeError', err)
else:
rc, out, err = test.script_helper.assert_python_ok(name, sm)
- self.assertEqual('123', out.decode('ascii').rstrip())
- self.assertEqual('', err.decode('ascii'))
+ self.assertEqual(out.rstrip(), b'123')
+ self.assertEqual(err, b'')
#
# Issue #17555: ForkAwareThreadLock
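
The tuple-to-list behaviour the updated assertion relies on comes from XML-RPC having only one sequence type (the array). A minimal sketch of the round trip, independent of the multiprocessing test harness, using only the stdlib xmlrpc.client module:

    import xmlrpc.client

    payload = ('hello world', None, True, 2.25)
    # allow_none is required because the payload contains None (XML-RPC <nil>)
    wire = xmlrpc.client.dumps((payload,), allow_none=True)
    decoded = xmlrpc.client.loads(wire)[0][0]
    print(type(decoded), decoded)   # <class 'list'> ['hello world', None, True, 2.25]
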
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index bdc7bad6da..55205d10d5 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -144,21 +144,22 @@ def create_dynamic_class(name, bases):
# the object returned by create_data().
DATA0 = (
- b'(lp0\nL0L\naL1L\naF2.0\nac'
- b'builtins\ncomplex\n'
- b'p1\n(F3.0\nF0.0\ntp2\nRp'
- b'3\naL1L\naL-1L\naL255L\naL-'
- b'255L\naL-256L\naL65535L\na'
- b'L-65535L\naL-65536L\naL2'
- b'147483647L\naL-2147483'
- b'647L\naL-2147483648L\na('
- b'Vabc\np4\ng4\nccopyreg'
- b'\n_reconstructor\np5\n('
- b'c__main__\nC\np6\ncbu'
- b'iltins\nobject\np7\nNt'
- b'p8\nRp9\n(dp10\nVfoo\np1'
- b'1\nL1L\nsVbar\np12\nL2L\nsb'
- b'g9\ntp13\nag13\naL5L\na.'
+ b'(lp0\nL0L\naL1L\naF2.0\n'
+ b'ac__builtin__\ncomple'
+ b'x\np1\n(F3.0\nF0.0\ntp2\n'
+ b'Rp3\naL1L\naL-1L\naL255'
+ b'L\naL-255L\naL-256L\naL'
+ b'65535L\naL-65535L\naL-'
+ b'65536L\naL2147483647L'
+ b'\naL-2147483647L\naL-2'
+ b'147483648L\na(Vabc\np4'
+ b'\ng4\nccopy_reg\n_recon'
+ b'structor\np5\n(c__main'
+ b'__\nC\np6\nc__builtin__'
+ b'\nobject\np7\nNtp8\nRp9\n'
+ b'(dp10\nVfoo\np11\nL1L\ns'
+ b'Vbar\np12\nL2L\nsbg9\ntp'
+ b'13\nag13\naL5L\na.'
)
# Disassembly of DATA0
@@ -172,88 +173,88 @@ DATA0_DIS = """\
14: a APPEND
15: F FLOAT 2.0
20: a APPEND
- 21: c GLOBAL 'builtins complex'
- 39: p PUT 1
- 42: ( MARK
- 43: F FLOAT 3.0
- 48: F FLOAT 0.0
- 53: t TUPLE (MARK at 42)
- 54: p PUT 2
- 57: R REDUCE
- 58: p PUT 3
- 61: a APPEND
- 62: L LONG 1
- 66: a APPEND
- 67: L LONG -1
- 72: a APPEND
- 73: L LONG 255
- 79: a APPEND
- 80: L LONG -255
- 87: a APPEND
- 88: L LONG -256
- 95: a APPEND
- 96: L LONG 65535
- 104: a APPEND
- 105: L LONG -65535
- 114: a APPEND
- 115: L LONG -65536
- 124: a APPEND
- 125: L LONG 2147483647
- 138: a APPEND
- 139: L LONG -2147483647
- 153: a APPEND
- 154: L LONG -2147483648
- 168: a APPEND
- 169: ( MARK
- 170: V UNICODE 'abc'
- 175: p PUT 4
- 178: g GET 4
- 181: c GLOBAL 'copyreg _reconstructor'
- 205: p PUT 5
- 208: ( MARK
- 209: c GLOBAL '__main__ C'
- 221: p PUT 6
- 224: c GLOBAL 'builtins object'
- 241: p PUT 7
- 244: N NONE
- 245: t TUPLE (MARK at 208)
- 246: p PUT 8
- 249: R REDUCE
- 250: p PUT 9
- 253: ( MARK
- 254: d DICT (MARK at 253)
- 255: p PUT 10
- 259: V UNICODE 'foo'
- 264: p PUT 11
- 268: L LONG 1
- 272: s SETITEM
- 273: V UNICODE 'bar'
- 278: p PUT 12
- 282: L LONG 2
- 286: s SETITEM
- 287: b BUILD
- 288: g GET 9
- 291: t TUPLE (MARK at 169)
- 292: p PUT 13
- 296: a APPEND
- 297: g GET 13
- 301: a APPEND
- 302: L LONG 5
- 306: a APPEND
- 307: . STOP
+ 21: c GLOBAL '__builtin__ complex'
+ 42: p PUT 1
+ 45: ( MARK
+ 46: F FLOAT 3.0
+ 51: F FLOAT 0.0
+ 56: t TUPLE (MARK at 45)
+ 57: p PUT 2
+ 60: R REDUCE
+ 61: p PUT 3
+ 64: a APPEND
+ 65: L LONG 1
+ 69: a APPEND
+ 70: L LONG -1
+ 75: a APPEND
+ 76: L LONG 255
+ 82: a APPEND
+ 83: L LONG -255
+ 90: a APPEND
+ 91: L LONG -256
+ 98: a APPEND
+ 99: L LONG 65535
+ 107: a APPEND
+ 108: L LONG -65535
+ 117: a APPEND
+ 118: L LONG -65536
+ 127: a APPEND
+ 128: L LONG 2147483647
+ 141: a APPEND
+ 142: L LONG -2147483647
+ 156: a APPEND
+ 157: L LONG -2147483648
+ 171: a APPEND
+ 172: ( MARK
+ 173: V UNICODE 'abc'
+ 178: p PUT 4
+ 181: g GET 4
+ 184: c GLOBAL 'copy_reg _reconstructor'
+ 209: p PUT 5
+ 212: ( MARK
+ 213: c GLOBAL '__main__ C'
+ 225: p PUT 6
+ 228: c GLOBAL '__builtin__ object'
+ 248: p PUT 7
+ 251: N NONE
+ 252: t TUPLE (MARK at 212)
+ 253: p PUT 8
+ 256: R REDUCE
+ 257: p PUT 9
+ 260: ( MARK
+ 261: d DICT (MARK at 260)
+ 262: p PUT 10
+ 266: V UNICODE 'foo'
+ 271: p PUT 11
+ 275: L LONG 1
+ 279: s SETITEM
+ 280: V UNICODE 'bar'
+ 285: p PUT 12
+ 289: L LONG 2
+ 293: s SETITEM
+ 294: b BUILD
+ 295: g GET 9
+ 298: t TUPLE (MARK at 172)
+ 299: p PUT 13
+ 303: a APPEND
+ 304: g GET 13
+ 308: a APPEND
+ 309: L LONG 5
+ 313: a APPEND
+ 314: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (
- b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
- b'builtins\ncomplex\nq\x01'
+ b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c__'
+ b'builtin__\ncomplex\nq\x01'
b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
- b'cq\x04h\x04ccopyreg\n_reco'
+ b'cq\x04h\x04ccopy_reg\n_reco'
b'nstructor\nq\x05(c__main'
- b'__\nC\nq\x06cbuiltins\n'
+ b'__\nC\nq\x06c__builtin__\n'
b'object\nq\x07Ntq\x08Rq\t}q\n('
b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
@@ -267,66 +268,66 @@ DATA1_DIS = """\
4: K BININT1 0
6: K BININT1 1
8: G BINFLOAT 2.0
- 17: c GLOBAL 'builtins complex'
- 35: q BINPUT 1
- 37: ( MARK
- 38: G BINFLOAT 3.0
- 47: G BINFLOAT 0.0
- 56: t TUPLE (MARK at 37)
- 57: q BINPUT 2
- 59: R REDUCE
- 60: q BINPUT 3
- 62: K BININT1 1
- 64: J BININT -1
- 69: K BININT1 255
- 71: J BININT -255
- 76: J BININT -256
- 81: M BININT2 65535
- 84: J BININT -65535
- 89: J BININT -65536
- 94: J BININT 2147483647
- 99: J BININT -2147483647
- 104: J BININT -2147483648
- 109: ( MARK
- 110: X BINUNICODE 'abc'
- 118: q BINPUT 4
- 120: h BINGET 4
- 122: c GLOBAL 'copyreg _reconstructor'
- 146: q BINPUT 5
- 148: ( MARK
- 149: c GLOBAL '__main__ C'
- 161: q BINPUT 6
- 163: c GLOBAL 'builtins object'
- 180: q BINPUT 7
- 182: N NONE
- 183: t TUPLE (MARK at 148)
- 184: q BINPUT 8
- 186: R REDUCE
- 187: q BINPUT 9
- 189: } EMPTY_DICT
- 190: q BINPUT 10
- 192: ( MARK
- 193: X BINUNICODE 'foo'
- 201: q BINPUT 11
- 203: K BININT1 1
- 205: X BINUNICODE 'bar'
- 213: q BINPUT 12
- 215: K BININT1 2
- 217: u SETITEMS (MARK at 192)
- 218: b BUILD
- 219: h BINGET 9
- 221: t TUPLE (MARK at 109)
- 222: q BINPUT 13
- 224: h BINGET 13
- 226: K BININT1 5
- 228: e APPENDS (MARK at 3)
- 229: . STOP
+ 17: c GLOBAL '__builtin__ complex'
+ 38: q BINPUT 1
+ 40: ( MARK
+ 41: G BINFLOAT 3.0
+ 50: G BINFLOAT 0.0
+ 59: t TUPLE (MARK at 40)
+ 60: q BINPUT 2
+ 62: R REDUCE
+ 63: q BINPUT 3
+ 65: K BININT1 1
+ 67: J BININT -1
+ 72: K BININT1 255
+ 74: J BININT -255
+ 79: J BININT -256
+ 84: M BININT2 65535
+ 87: J BININT -65535
+ 92: J BININT -65536
+ 97: J BININT 2147483647
+ 102: J BININT -2147483647
+ 107: J BININT -2147483648
+ 112: ( MARK
+ 113: X BINUNICODE 'abc'
+ 121: q BINPUT 4
+ 123: h BINGET 4
+ 125: c GLOBAL 'copy_reg _reconstructor'
+ 150: q BINPUT 5
+ 152: ( MARK
+ 153: c GLOBAL '__main__ C'
+ 165: q BINPUT 6
+ 167: c GLOBAL '__builtin__ object'
+ 187: q BINPUT 7
+ 189: N NONE
+ 190: t TUPLE (MARK at 152)
+ 191: q BINPUT 8
+ 193: R REDUCE
+ 194: q BINPUT 9
+ 196: } EMPTY_DICT
+ 197: q BINPUT 10
+ 199: ( MARK
+ 200: X BINUNICODE 'foo'
+ 208: q BINPUT 11
+ 210: K BININT1 1
+ 212: X BINUNICODE 'bar'
+ 220: q BINPUT 12
+ 222: K BININT1 2
+ 224: u SETITEMS (MARK at 199)
+ 225: b BUILD
+ 226: h BINGET 9
+ 228: t TUPLE (MARK at 112)
+ 229: q BINPUT 13
+ 231: h BINGET 13
+ 233: K BININT1 5
+ 235: e APPENDS (MARK at 3)
+ 236: . STOP
highest protocol among opcodes = 1
"""
DATA2 = (
b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
- b'builtins\ncomplex\n'
+ b'__builtin__\ncomplex\n'
b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
@@ -346,52 +347,52 @@ DATA2_DIS = """\
6: K BININT1 0
8: K BININT1 1
10: G BINFLOAT 2.0
- 19: c GLOBAL 'builtins complex'
- 37: q BINPUT 1
- 39: G BINFLOAT 3.0
- 48: G BINFLOAT 0.0
- 57: \x86 TUPLE2
- 58: q BINPUT 2
- 60: R REDUCE
- 61: q BINPUT 3
- 63: K BININT1 1
- 65: J BININT -1
- 70: K BININT1 255
- 72: J BININT -255
- 77: J BININT -256
- 82: M BININT2 65535
- 85: J BININT -65535
- 90: J BININT -65536
- 95: J BININT 2147483647
- 100: J BININT -2147483647
- 105: J BININT -2147483648
- 110: ( MARK
- 111: X BINUNICODE 'abc'
- 119: q BINPUT 4
- 121: h BINGET 4
- 123: c GLOBAL '__main__ C'
- 135: q BINPUT 5
- 137: ) EMPTY_TUPLE
- 138: \x81 NEWOBJ
- 139: q BINPUT 6
- 141: } EMPTY_DICT
- 142: q BINPUT 7
- 144: ( MARK
- 145: X BINUNICODE 'foo'
- 153: q BINPUT 8
- 155: K BININT1 1
- 157: X BINUNICODE 'bar'
- 165: q BINPUT 9
- 167: K BININT1 2
- 169: u SETITEMS (MARK at 144)
- 170: b BUILD
- 171: h BINGET 6
- 173: t TUPLE (MARK at 110)
- 174: q BINPUT 10
- 176: h BINGET 10
- 178: K BININT1 5
- 180: e APPENDS (MARK at 5)
- 181: . STOP
+ 19: c GLOBAL '__builtin__ complex'
+ 40: q BINPUT 1
+ 42: G BINFLOAT 3.0
+ 51: G BINFLOAT 0.0
+ 60: \x86 TUPLE2
+ 61: q BINPUT 2
+ 63: R REDUCE
+ 64: q BINPUT 3
+ 66: K BININT1 1
+ 68: J BININT -1
+ 73: K BININT1 255
+ 75: J BININT -255
+ 80: J BININT -256
+ 85: M BININT2 65535
+ 88: J BININT -65535
+ 93: J BININT -65536
+ 98: J BININT 2147483647
+ 103: J BININT -2147483647
+ 108: J BININT -2147483648
+ 113: ( MARK
+ 114: X BINUNICODE 'abc'
+ 122: q BINPUT 4
+ 124: h BINGET 4
+ 126: c GLOBAL '__main__ C'
+ 138: q BINPUT 5
+ 140: ) EMPTY_TUPLE
+ 141: \x81 NEWOBJ
+ 142: q BINPUT 6
+ 144: } EMPTY_DICT
+ 145: q BINPUT 7
+ 147: ( MARK
+ 148: X BINUNICODE 'foo'
+ 156: q BINPUT 8
+ 158: K BININT1 1
+ 160: X BINUNICODE 'bar'
+ 168: q BINPUT 9
+ 170: K BININT1 2
+ 172: u SETITEMS (MARK at 147)
+ 173: b BUILD
+ 174: h BINGET 6
+ 176: t TUPLE (MARK at 113)
+ 177: q BINPUT 10
+ 179: h BINGET 10
+ 181: K BININT1 5
+ 183: e APPENDS (MARK at 5)
+ 184: . STOP
highest protocol among opcodes = 2
"""
@@ -570,14 +571,14 @@ class AbstractPickleTests(unittest.TestCase):
xname = X.__name__.encode('ascii')
# Protocol 0 (text mode pickle):
"""
- 0: ( MARK
- 1: i INST '__main__ X' (MARK at 0)
- 15: p PUT 0
- 18: ( MARK
- 19: d DICT (MARK at 18)
- 20: p PUT 1
- 23: b BUILD
- 24: . STOP
+ 0: ( MARK
+ 1: i INST '__main__ X' (MARK at 0)
+ 13: p PUT 0
+ 16: ( MARK
+ 17: d DICT (MARK at 16)
+ 18: p PUT 1
+ 21: b BUILD
+ 22: . STOP
"""
pickle0 = (b"(i__main__\n"
b"X\n"
@@ -587,15 +588,15 @@ class AbstractPickleTests(unittest.TestCase):
# Protocol 1 (binary mode pickle)
"""
- 0: ( MARK
- 1: c GLOBAL '__main__ X'
- 15: q BINPUT 0
- 17: o OBJ (MARK at 0)
- 18: q BINPUT 1
- 20: } EMPTY_DICT
- 21: q BINPUT 2
- 23: b BUILD
- 24: . STOP
+ 0: ( MARK
+ 1: c GLOBAL '__main__ X'
+ 13: q BINPUT 0
+ 15: o OBJ (MARK at 0)
+ 16: q BINPUT 1
+ 18: } EMPTY_DICT
+ 19: q BINPUT 2
+ 21: b BUILD
+ 22: . STOP
"""
pickle1 = (b'(c__main__\n'
b'X\n'
@@ -604,16 +605,16 @@ class AbstractPickleTests(unittest.TestCase):
# Protocol 2 (pickle2 = b'\x80\x02' + pickle1)
"""
- 0: \x80 PROTO 2
- 2: ( MARK
- 3: c GLOBAL '__main__ X'
- 17: q BINPUT 0
- 19: o OBJ (MARK at 2)
- 20: q BINPUT 1
- 22: } EMPTY_DICT
- 23: q BINPUT 2
- 25: b BUILD
- 26: . STOP
+ 0: \x80 PROTO 2
+ 2: ( MARK
+ 3: c GLOBAL '__main__ X'
+ 15: q BINPUT 0
+ 17: o OBJ (MARK at 2)
+ 18: q BINPUT 1
+ 20: } EMPTY_DICT
+ 21: q BINPUT 2
+ 23: b BUILD
+ 24: . STOP
"""
pickle2 = (b'\x80\x02(c__main__\n'
b'X\n'
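
The DATAn_DIS blocks are opcode disassemblies of the corresponding DATAn byte strings (here switched to pickles that use the Python 2 module names __builtin__ and copy_reg). Listings of this shape can be regenerated with pickletools; exact offsets and module names depend on the interpreter that produced the pickle:

    import pickle, pickletools

    sample = [0, 1, 2.0, 3 + 0j]
    for proto in (0, 1, 2):
        print('protocol', proto)
        pickletools.dis(pickle.dumps(sample, proto))
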
diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py
index 4231f37bc9..8d1c8db2ac 100644
--- a/Lib/test/test__locale.py
+++ b/Lib/test/test__locale.py
@@ -9,7 +9,6 @@ import locale
import sys
import unittest
from platform import uname
-from test.support import run_unittest
if uname().system == "Darwin":
maj, min, mic = [int(part) for part in uname().release.split(".")]
@@ -24,45 +23,52 @@ candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT',
'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH',
'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO',
'bs_BA', 'fr_LU', 'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA',
- 'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US',
+ 'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'ps_AF', 'en_US',
'es_ES.ISO8859-1', 'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR']
-# Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
-# workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
-# the locale encoding ISO-8859-2, the thousauds separator is b'\xA0' and it is
-# decoded as U+30000020 (an invalid character) by mbstowcs().
-if sys.platform == 'sunos5':
- old_locale = locale.setlocale(locale.LC_ALL)
- try:
- locales = []
- for loc in candidate_locales:
- try:
- locale.setlocale(locale.LC_ALL, loc)
- except Error:
- continue
- encoding = locale.getpreferredencoding(False)
- try:
- localeconv()
- except Exception as err:
- print("WARNING: Skip locale %s (encoding %s): [%s] %s"
- % (loc, encoding, type(err), err))
- else:
- locales.append(loc)
- candidate_locales = locales
- finally:
- locale.setlocale(locale.LC_ALL, old_locale)
-
-# Workaround for MSVC6(debug) crash bug
-if "MSC v.1200" in sys.version:
- def accept(loc):
- a = loc.split(".")
- return not(len(a) == 2 and len(a[-1]) >= 9)
- candidate_locales = [loc for loc in candidate_locales if accept(loc)]
+def setUpModule():
+ global candidate_locales
+ # Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
+ # workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
+ # the locale encoding ISO-8859-2, the thousands separator is b'\xA0' and it is
+ # decoded as U+30000020 (an invalid character) by mbstowcs().
+ if sys.platform == 'sunos5':
+ old_locale = locale.setlocale(locale.LC_ALL)
+ try:
+ locales = []
+ for loc in candidate_locales:
+ try:
+ locale.setlocale(locale.LC_ALL, loc)
+ except Error:
+ continue
+ encoding = locale.getpreferredencoding(False)
+ try:
+ localeconv()
+ except Exception as err:
+ print("WARNING: Skip locale %s (encoding %s): [%s] %s"
+ % (loc, encoding, type(err), err))
+ else:
+ locales.append(loc)
+ candidate_locales = locales
+ finally:
+ locale.setlocale(locale.LC_ALL, old_locale)
+
+ # Workaround for MSVC6(debug) crash bug
+ if "MSC v.1200" in sys.version:
+ def accept(loc):
+ a = loc.split(".")
+ return not(len(a) == 2 and len(a[-1]) >= 9)
+ candidate_locales = [loc for loc in candidate_locales if accept(loc)]
# List known locale values to test against when available.
# Dict formatted as ``<locale> : (<decimal_point>, <thousands_sep>)``. If a
# value is not known, use '' .
-known_numerics = {'fr_FR' : (',', ''), 'en_US':('.', ',')}
+known_numerics = {
+ 'en_US': ('.', ','),
+ 'fr_FR' : (',', ' '),
+ 'de_DE' : (',', '.'),
+ 'ps_AF': ('\u066b', '\u066c'),
+}
class _LocaleTests(unittest.TestCase):
@@ -91,10 +97,12 @@ class _LocaleTests(unittest.TestCase):
calc_value, known_value,
calc_type, data_type, set_locale,
used_locale))
+ return True
@unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
def test_lc_numeric_nl_langinfo(self):
# Test nl_langinfo against known values
+ tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@@ -103,10 +111,14 @@ class _LocaleTests(unittest.TestCase):
continue
for li, lc in ((RADIXCHAR, "decimal_point"),
(THOUSEP, "thousands_sep")):
- self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc)
+ if self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc):
+ tested = True
+ if not tested:
+ self.skipTest('no suitable locales')
def test_lc_numeric_localeconv(self):
# Test localeconv against known values
+ tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@@ -116,11 +128,15 @@ class _LocaleTests(unittest.TestCase):
formatting = localeconv()
for lc in ("decimal_point",
"thousands_sep"):
- self.numeric_tester('localeconv', formatting[lc], lc, loc)
+ if self.numeric_tester('localeconv', formatting[lc], lc, loc):
+ tested = True
+ if not tested:
+ self.skipTest('no suitable locales')
@unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
def test_lc_numeric_basic(self):
# Test nl_langinfo against localeconv
+ tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@@ -140,10 +156,14 @@ class _LocaleTests(unittest.TestCase):
"(set to %s, using %s)" % (
nl_radixchar, li_radixchar,
loc, set_locale))
+ tested = True
+ if not tested:
+ self.skipTest('no suitable locales')
def test_float_parsing(self):
# Bug #1391872: Test whether float parsing is okay on European
# locales.
+ tested = False
for loc in candidate_locales:
try:
setlocale(LC_NUMERIC, loc)
@@ -162,9 +182,10 @@ class _LocaleTests(unittest.TestCase):
if localeconv()['decimal_point'] != '.':
self.assertRaises(ValueError, float,
localeconv()['decimal_point'].join(['1', '23']))
+ tested = True
+ if not tested:
+ self.skipTest('no suitable locales')
-def test_main():
- run_unittest(_LocaleTests)
if __name__ == '__main__':
- test_main()
+ unittest.main()
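
The known_numerics table pairs each locale with the decimal point and thousands separator that localeconv() is expected to report. A quick way to check what a given locale reports on the current system (assuming that locale is installed):

    import locale

    locale.setlocale(locale.LC_NUMERIC, 'de_DE')       # raises locale.Error if missing
    conv = locale.localeconv()
    print(conv['decimal_point'], conv['thousands_sep'])   # expected: ',' and '.'
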
diff --git a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py
index 3d4ac51df4..a73539d1a6 100644
--- a/Lib/test/test_asyncio/test_queues.py
+++ b/Lib/test/test_asyncio/test_queues.py
@@ -408,14 +408,14 @@ class PriorityQueueTests(_QueueTestBase):
self.assertEqual([1, 2, 3], items)
-class JoinableQueueTests(_QueueTestBase):
+class QueueJoinTests(_QueueTestBase):
def test_task_done_underflow(self):
- q = asyncio.JoinableQueue(loop=self.loop)
+ q = asyncio.Queue(loop=self.loop)
self.assertRaises(ValueError, q.task_done)
def test_task_done(self):
- q = asyncio.JoinableQueue(loop=self.loop)
+ q = asyncio.Queue(loop=self.loop)
for i in range(100):
q.put_nowait(i)
@@ -452,7 +452,7 @@ class JoinableQueueTests(_QueueTestBase):
self.loop.run_until_complete(asyncio.wait(tasks, loop=self.loop))
def test_join_empty_queue(self):
- q = asyncio.JoinableQueue(loop=self.loop)
+ q = asyncio.Queue(loop=self.loop)
# Test that a queue join()s successfully, and before anything else
# (done twice for insurance).
@@ -465,7 +465,7 @@ class JoinableQueueTests(_QueueTestBase):
self.loop.run_until_complete(join())
def test_format(self):
- q = asyncio.JoinableQueue(loop=self.loop)
+ q = asyncio.Queue(loop=self.loop)
self.assertEqual(q._format(), 'maxsize=0')
q._unfinished_tasks = 2
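
These renames track the asyncio change that folded JoinableQueue into Queue, so join() and task_done() are available on every Queue. A small sketch of the producer/consumer handshake, written with the modern async/await spelling rather than the yield from style used in these tests:

    import asyncio

    async def main():
        q = asyncio.Queue()
        for i in range(3):
            q.put_nowait(i)

        async def worker():
            while True:
                await q.get()
                q.task_done()          # one task_done() per completed get()

        task = asyncio.ensure_future(worker())
        await q.join()                 # unblocks once the queue is fully processed
        task.cancel()

    asyncio.run(main())
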
diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py
index b467b04f53..5ccdafb151 100644
--- a/Lib/test/test_asyncio/test_subprocess.py
+++ b/Lib/test/test_asyncio/test_subprocess.py
@@ -349,6 +349,70 @@ class SubprocessMixin:
self.loop.run_until_complete(cancel_make_transport())
test_utils.run_briefly(self.loop)
+ def test_close_kill_running(self):
+ @asyncio.coroutine
+ def kill_running():
+ create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
+ *PROGRAM_BLOCKED)
+ transport, protocol = yield from create
+
+ kill_called = False
+ def kill():
+ nonlocal kill_called
+ kill_called = True
+ orig_kill()
+
+ proc = transport.get_extra_info('subprocess')
+ orig_kill = proc.kill
+ proc.kill = kill
+ returncode = transport.get_returncode()
+ transport.close()
+ yield from transport._wait()
+ return (returncode, kill_called)
+
+ # Ignore "Close running child process: kill ..." log
+ with test_utils.disable_logger():
+ returncode, killed = self.loop.run_until_complete(kill_running())
+ self.assertIsNone(returncode)
+
+ # transport.close() must kill the process if it is still running
+ self.assertTrue(killed)
+ test_utils.run_briefly(self.loop)
+
+ def test_close_dont_kill_finished(self):
+ @asyncio.coroutine
+ def kill_running():
+ create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
+ *PROGRAM_BLOCKED)
+ transport, protocol = yield from create
+ proc = transport.get_extra_info('subprocess')
+
+ # kill the process (but asyncio is not notified immediately)
+ proc.kill()
+ proc.wait()
+
+ proc.kill = mock.Mock()
+ proc_returncode = proc.poll()
+ transport_returncode = transport.get_returncode()
+ transport.close()
+ return (proc_returncode, transport_returncode, proc.kill.called)
+
+ # Ignore "Unknown child process pid ..." log of SafeChildWatcher,
+ # emitted because the test already consumes the exit status:
+ # proc.wait()
+ with test_utils.disable_logger():
+ result = self.loop.run_until_complete(kill_running())
+ test_utils.run_briefly(self.loop)
+
+ proc_returncode, transport_return_code, killed = result
+
+ self.assertIsNotNone(proc_returncode)
+ self.assertIsNone(transport_return_code)
+
+ # transport.close() must not kill the process if it finished, even if
+ # the transport was not notified yet
+ self.assertFalse(killed)
+
if sys.platform != 'win32':
# Unix
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 41249ff024..dc0835c527 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -295,7 +295,7 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase):
def test_create_unix_connection_path_sock(self):
coro = self.loop.create_unix_connection(
- lambda: None, '/dev/null', sock=object())
+ lambda: None, os.devnull, sock=object())
with self.assertRaisesRegex(ValueError, 'path and sock can not be'):
self.loop.run_until_complete(coro)
@@ -308,14 +308,14 @@ class SelectorEventLoopUnixSocketTests(test_utils.TestCase):
def test_create_unix_connection_nossl_serverhost(self):
coro = self.loop.create_unix_connection(
- lambda: None, '/dev/null', server_hostname='spam')
+ lambda: None, os.devnull, server_hostname='spam')
with self.assertRaisesRegex(ValueError,
'server_hostname is only meaningful'):
self.loop.run_until_complete(coro)
def test_create_unix_connection_ssl_noserverhost(self):
coro = self.loop.create_unix_connection(
- lambda: None, '/dev/null', ssl=True)
+ lambda: None, os.devnull, ssl=True)
with self.assertRaisesRegex(
ValueError, 'you have to pass server_hostname when using ssl'):
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
index ce012d6aa3..1535e8e669 100644
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -87,11 +87,11 @@ class BZ2FileTest(BaseTest):
def testBadArgs(self):
self.assertRaises(TypeError, BZ2File, 123.456)
- self.assertRaises(ValueError, BZ2File, "/dev/null", "z")
- self.assertRaises(ValueError, BZ2File, "/dev/null", "rx")
- self.assertRaises(ValueError, BZ2File, "/dev/null", "rbt")
- self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=0)
- self.assertRaises(ValueError, BZ2File, "/dev/null", compresslevel=10)
+ self.assertRaises(ValueError, BZ2File, os.devnull, "z")
+ self.assertRaises(ValueError, BZ2File, os.devnull, "rx")
+ self.assertRaises(ValueError, BZ2File, os.devnull, "rbt")
+ self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=0)
+ self.assertRaises(ValueError, BZ2File, os.devnull, compresslevel=10)
def testRead(self):
self.createTempFile()
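
These substitutions, like the matching ones in test_cgi.py and test_subprocess.py below, replace the hard-coded '/dev/null' with os.devnull, which names the platform's null device portably. For instance:

    import os

    # '/dev/null' on POSIX, 'nul' on Windows
    with open(os.devnull, 'w') as sink:
        sink.write('discarded\n')
    print(os.devnull)
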
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 86e1f3a5f0..1127dd12e6 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -186,9 +186,9 @@ class CgiTests(unittest.TestCase):
cgi.initlog("%s", "Testing initlog 1")
cgi.log("%s", "Testing log 2")
self.assertEqual(cgi.logfp.getvalue(), "Testing initlog 1\nTesting log 2\n")
- if os.path.exists("/dev/null"):
+ if os.path.exists(os.devnull):
cgi.logfp = None
- cgi.logfile = "/dev/null"
+ cgi.logfile = os.devnull
cgi.initlog("%s", "Testing log 3")
self.addCleanup(cgi.closelog)
cgi.log("Testing log 4")
diff --git a/Lib/test/test_dbm_dumb.py b/Lib/test/test_dbm_dumb.py
index 29f48a3561..dc88ca64dc 100644
--- a/Lib/test/test_dbm_dumb.py
+++ b/Lib/test/test_dbm_dumb.py
@@ -217,6 +217,15 @@ class DumbDBMTestCase(unittest.TestCase):
self.assertEqual(str(cm.exception),
"DBM object has already been closed")
+ def test_eval(self):
+ with open(_fname + '.dir', 'w') as stream:
+ stream.write("str(print('Hacked!')), 0\n")
+ with support.captured_stdout() as stdout:
+ with self.assertRaises(ValueError):
+ with dumbdbm.open(_fname) as f:
+ pass
+ self.assertEqual(stdout.getvalue(), '')
+
def tearDown(self):
_delete_files()
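
The new test_eval test writes an expression into the .dir index file and expects a ValueError with nothing printed, i.e. the index must no longer be evaluated as arbitrary code. A literal-only parse behaves exactly that way; the sketch below uses ast.literal_eval to show the behaviour being relied on (whether dbm.dumb uses it internally is not shown in this diff):

    import ast

    line = "str(print('Hacked!')), 0"
    try:
        ast.literal_eval(line)        # function calls are not literals
    except ValueError:
        print('rejected, nothing executed')
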
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 10120530b8..0375601c51 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -155,9 +155,9 @@ class TestPartialC(TestPartial, unittest.TestCase):
def test_repr(self):
args = (object(), object())
args_repr = ', '.join(repr(a) for a in args)
- #kwargs = {'a': object(), 'b': object()}
- kwargs = {'a': object()}
- kwargs_repr = ', '.join("%s=%r" % (k, v) for k, v in kwargs.items())
+ kwargs = {'a': object(), 'b': object()}
+ kwargs_reprs = ['a={a!r}, b={b!r}'.format_map(kwargs),
+ 'b={b!r}, a={a!r}'.format_map(kwargs)]
if self.partial is c_functools.partial:
name = 'functools.partial'
else:
@@ -172,12 +172,14 @@ class TestPartialC(TestPartial, unittest.TestCase):
repr(f))
f = self.partial(capture, **kwargs)
- self.assertEqual('{}({!r}, {})'.format(name, capture, kwargs_repr),
- repr(f))
+ self.assertIn(repr(f),
+ ['{}({!r}, {})'.format(name, capture, kwargs_repr)
+ for kwargs_repr in kwargs_reprs])
f = self.partial(capture, *args, **kwargs)
- self.assertEqual('{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr),
- repr(f))
+ self.assertIn(repr(f),
+ ['{}({!r}, {}, {})'.format(name, capture, args_repr, kwargs_repr)
+ for kwargs_repr in kwargs_reprs])
def test_pickle(self):
f = self.partial(signature, 'asdf', bar=True)
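
The repr test is relaxed here because keyword-argument order in the repr of a partial is not guaranteed, so both orderings have to be accepted. Roughly:

    import functools

    def capture(*args, **kwargs):
        return args, kwargs

    f = functools.partial(capture, a=object(), b=object())
    # Depending on dict iteration order the repr may list a= or b= first,
    # so a test must accept either form rather than one fixed string.
    print(repr(f))
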
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index aaa5c69d49..c57875c3dd 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -190,6 +190,8 @@ class DebuggerTests(unittest.TestCase):
'linux-vdso.so',
'warning: Could not load shared library symbols for '
'linux-gate.so',
+ 'warning: Could not load shared library symbols for '
+ 'linux-vdso64.so',
'Do you need "set solib-search-path" or '
'"set sysroot"?',
'warning: Source file is more recent than executable.',
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 3fc34665da..d0a0e8de81 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -708,7 +708,22 @@ class BasicTest(TestCase):
self.assertTrue(response.closed)
self.assertTrue(conn.sock.file_closed)
+
class OfflineTest(TestCase):
+ def test_all(self):
+ # Documented objects defined in the module should be in __all__
+ expected = {"responses"} # White-list documented dict() object
+ # HTTPMessage, parse_headers(), and the HTTP status code constants are
+ # intentionally omitted for simplicity
+ blacklist = {"HTTPMessage", "parse_headers"}
+ for name in dir(client):
+ if name in blacklist:
+ continue
+ module_object = getattr(client, name)
+ if getattr(module_object, "__module__", None) == "http.client":
+ expected.add(name)
+ self.assertCountEqual(client.__all__, expected)
+
def test_responses(self):
self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 569341d83b..74e07143eb 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -616,6 +616,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+ self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+ self.assertEqual(self.handler.command, 'GET')
+ self.assertEqual(self.handler.path, '/')
+ self.assertEqual(self.handler.request_version, 'HTTP/1.1')
+ self.assertSequenceEqual(self.handler.headers.items(), ())
def test_http_1_0(self):
result = self.send_typical_request(b'GET / HTTP/1.0\r\n\r\n')
@@ -623,6 +628,11 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+ self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
+ self.assertEqual(self.handler.command, 'GET')
+ self.assertEqual(self.handler.path, '/')
+ self.assertEqual(self.handler.request_version, 'HTTP/1.0')
+ self.assertSequenceEqual(self.handler.headers.items(), ())
def test_http_0_9(self):
result = self.send_typical_request(b'GET / HTTP/0.9\r\n\r\n')
@@ -636,6 +646,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[1:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+ self.assertEqual(self.handler.requestline, 'GET / HTTP/1.0')
+ self.assertEqual(self.handler.command, 'GET')
+ self.assertEqual(self.handler.path, '/')
+ self.assertEqual(self.handler.request_version, 'HTTP/1.0')
+ headers = (("Expect", "100-continue"),)
+ self.assertSequenceEqual(self.handler.headers.items(), headers)
def test_with_continue_1_1(self):
result = self.send_typical_request(b'GET / HTTP/1.1\r\nExpect: 100-continue\r\n\r\n')
@@ -645,6 +661,12 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
self.verify_expected_headers(result[2:-1])
self.verify_get_called()
self.assertEqual(result[-1], b'<html><body>Data</body></html>\r\n')
+ self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+ self.assertEqual(self.handler.command, 'GET')
+ self.assertEqual(self.handler.path, '/')
+ self.assertEqual(self.handler.request_version, 'HTTP/1.1')
+ headers = (("Expect", "100-continue"),)
+ self.assertSequenceEqual(self.handler.headers.items(), headers)
def test_header_buffering_of_send_error(self):
@@ -730,6 +752,7 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
result = self.send_typical_request(b'GET ' + b'x' * 65537)
self.assertEqual(result[0], b'HTTP/1.1 414 Request-URI Too Long\r\n')
self.assertFalse(self.handler.get_called)
+ self.assertIsInstance(self.handler.requestline, str)
def test_header_length(self):
# Issue #6791: same for headers
@@ -737,6 +760,22 @@ class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
b'GET / HTTP/1.1\r\nX-Foo: bar' + b'r' * 65537 + b'\r\n\r\n')
self.assertEqual(result[0], b'HTTP/1.1 400 Line too long\r\n')
self.assertFalse(self.handler.get_called)
+ self.assertEqual(self.handler.requestline, 'GET / HTTP/1.1')
+
+ def test_close_connection(self):
+ # handle_one_request() should be repeatedly called until
+ # it sets close_connection
+ def handle_one_request():
+ self.handler.close_connection = next(close_values)
+ self.handler.handle_one_request = handle_one_request
+
+ close_values = iter((True,))
+ self.handler.handle()
+ self.assertRaises(StopIteration, next, close_values)
+
+ close_values = iter((False, False, True))
+ self.handler.handle()
+ self.assertRaises(StopIteration, next, close_values)
class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
""" Test url parsing """
@@ -760,6 +799,19 @@ class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
self.assertEqual(path, self.translated)
+class MiscTestCase(unittest.TestCase):
+ def test_all(self):
+ expected = []
+ blacklist = {'executable', 'nobody_uid', 'test'}
+ for name in dir(server):
+ if name.startswith('_') or name in blacklist:
+ continue
+ module_object = getattr(server, name)
+ if getattr(module_object, '__module__', None) == 'http.server':
+ expected.append(name)
+ self.assertCountEqual(server.__all__, expected)
+
+
def test_main(verbose=None):
cwd = os.getcwd()
try:
@@ -769,6 +821,7 @@ def test_main(verbose=None):
SimpleHTTPServerTestCase,
CGIHTTPServerTestCase,
SimpleHTTPRequestHandlerTestCase,
+ MiscTestCase,
)
finally:
os.chdir(cwd)
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index a424f760f7..668b023e48 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -593,13 +593,44 @@ class IOTest(unittest.TestCase):
with self.open(zero, "r") as f:
self.assertRaises(OverflowError, f.read)
- def test_flush_error_on_close(self):
- f = self.open(support.TESTFN, "wb", buffering=0)
+ def check_flush_error_on_close(self, *args, **kwargs):
+ # Test that the file is closed despite failed flush
+ # and that flush() is called before file closed.
+ f = self.open(*args, **kwargs)
+ closed = []
def bad_flush():
+ closed[:] = [f.closed]
raise OSError()
f.flush = bad_flush
self.assertRaises(OSError, f.close) # exception not swallowed
self.assertTrue(f.closed)
+ self.assertTrue(closed) # flush() called
+ self.assertFalse(closed[0]) # flush() called before file closed
+ f.flush = lambda: None # break reference loop
+
+ def test_flush_error_on_close(self):
+ # raw file
+ # Issue #5700: io.FileIO calls flush() after file closed
+ self.check_flush_error_on_close(support.TESTFN, 'wb', buffering=0)
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'wb', buffering=0)
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
+ os.close(fd)
+ # buffered io
+ self.check_flush_error_on_close(support.TESTFN, 'wb')
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'wb')
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'wb', closefd=False)
+ os.close(fd)
+ # text io
+ self.check_flush_error_on_close(support.TESTFN, 'w')
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'w')
+ fd = os.open(support.TESTFN, os.O_WRONLY|os.O_CREAT)
+ self.check_flush_error_on_close(fd, 'w', closefd=False)
+ os.close(fd)
def test_multi_close(self):
f = self.open(support.TESTFN, "wb", buffering=0)
@@ -788,13 +819,22 @@ class CommonBufferedTests:
self.assertEqual(repr(b), "<%s name=b'dummy'>" % clsname)
def test_flush_error_on_close(self):
+ # Test that buffered file is closed despite failed flush
+ # and that flush() is called before file closed.
raw = self.MockRawIO()
+ closed = []
def bad_flush():
+ closed[:] = [b.closed, raw.closed]
raise OSError()
raw.flush = bad_flush
b = self.tp(raw)
self.assertRaises(OSError, b.close) # exception not swallowed
self.assertTrue(b.closed)
+ self.assertTrue(raw.closed)
+ self.assertTrue(closed) # flush() called
+ self.assertFalse(closed[0]) # flush() called before file closed
+ self.assertFalse(closed[1])
+ raw.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
raw = self.MockRawIO()
@@ -2618,12 +2658,21 @@ class TextIOWrapperTest(unittest.TestCase):
self.assertEqual(content.count("Thread%03d\n" % n), 1)
def test_flush_error_on_close(self):
+ # Test that text file is closed despite failed flush
+ # and that flush() is called before file closed.
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
+ closed = []
def bad_flush():
+ closed[:] = [txt.closed, txt.buffer.closed]
raise OSError()
txt.flush = bad_flush
self.assertRaises(OSError, txt.close) # exception not swallowed
self.assertTrue(txt.closed)
+ self.assertTrue(txt.buffer.closed)
+ self.assertTrue(closed) # flush() called
+ self.assertFalse(closed[0]) # flush() called before file closed
+ self.assertFalse(closed[1])
+ txt.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
buffer = self.BytesIO(self.testdata)
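
All three flush_error_on_close variants check the same contract: close() must call flush() first, propagate its error, and still leave the file closed. The core of that contract in isolation, on a raw FileIO:

    import os

    f = open(os.devnull, 'wb', buffering=0)      # raw FileIO
    def bad_flush():
        raise OSError('flush failed')
    f.flush = bad_flush
    try:
        f.close()                                # the error is not swallowed...
    except OSError:
        pass
    print(f.closed)                              # ...but the file is closed anyway
    f.flush = lambda: None                       # break the reference loop
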
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
index e979753879..9369a2531e 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
@@ -511,7 +511,7 @@ class TestMiscellaneous(unittest.TestCase):
self.skipTest('test needs Turkish locale')
loc = locale.getlocale(locale.LC_CTYPE)
if verbose:
- print('got locale %a' % (loc,))
+ print('testing with %a' % (loc,), end=' ', flush=True)
locale.setlocale(locale.LC_CTYPE, loc)
self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE))
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
index ce267ddeb3..2929f988a8 100644
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -44,6 +44,13 @@ class Test_MultibyteCodec(unittest.TestCase):
self.assertRaises(IndexError, dec,
b'apple\x92ham\x93spam', 'test.cjktest')
+ def test_errorcallback_custom_ignore(self):
+ # Issue #23215: MemoryError with custom error handlers and multibyte codecs
+ data = 100 * "\udc00"
+ codecs.register_error("test.ignore", codecs.ignore_errors)
+ for enc in ALL_CJKENCODINGS:
+ self.assertEqual(data.encode(enc, "test.ignore"), b'')
+
def test_codingspec(self):
try:
for enc in ALL_CJKENCODINGS:
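
The regression covered by issue #23215 is a MemoryError when a custom error handler (even one that just delegates to the built-in ignore handler) is used with the CJK multibyte codecs. Stripped of the test scaffolding, the behaviour being asserted is:

    import codecs

    codecs.register_error('example.ignore', codecs.ignore_errors)
    data = 100 * '\udc00'                            # unencodable lone surrogates
    print(data.encode('euc-jp', 'example.ignore'))   # b'' -- everything is ignored
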
diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py
index ab88c340bf..11420e2c17 100644
--- a/Lib/test/test_pathlib.py
+++ b/Lib/test/test_pathlib.py
@@ -105,31 +105,35 @@ class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
check = self._check_parse_parts
# First part is anchored
check(['c:'], ('c:', '', ['c:']))
- check(['c:\\'], ('c:', '\\', ['c:\\']))
- check(['\\'], ('', '\\', ['\\']))
+ check(['c:/'], ('c:', '\\', ['c:\\']))
+ check(['/'], ('', '\\', ['\\']))
check(['c:a'], ('c:', '', ['c:', 'a']))
- check(['c:\\a'], ('c:', '\\', ['c:\\', 'a']))
- check(['\\a'], ('', '\\', ['\\', 'a']))
+ check(['c:/a'], ('c:', '\\', ['c:\\', 'a']))
+ check(['/a'], ('', '\\', ['\\', 'a']))
# UNC paths
- check(['\\\\a\\b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
- check(['\\\\a\\b\\'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
- check(['\\\\a\\b\\c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
+ check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+ check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
+ check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
# Second part is anchored, so that the first part is ignored
check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
- check(['a', 'Z:\\b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
- check(['a', '\\b', 'c'], ('', '\\', ['\\', 'b', 'c']))
+ check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
# UNC paths
- check(['a', '\\\\b\\c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+ check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Collapsing and stripping excess slashes
- check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
+ check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
# UNC paths
- check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
+ check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Extended paths
- check(['\\\\?\\c:\\'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
- check(['\\\\?\\c:\\a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
+ check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
+ check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
+ check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
# Extended UNC paths (format is "\\?\UNC\server\share")
- check(['\\\\?\\UNC\\b\\c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
- check(['\\\\?\\UNC\\b\\c\\d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
+ check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
+ check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
+ # Second part has a root but not drive
+ check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c']))
+ check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
+ check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))
def test_splitroot(self):
f = self.flavour.splitroot
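
The rewritten checks feed forward slashes because the NT flavour accepts them and normalises everything to backslashes; the expected tuples on the right stay backslash-based. The same normalisation can be seen directly on PureWindowsPath, which is usable on any platform:

    from pathlib import PureWindowsPath

    p = PureWindowsPath('//a/b/c')
    print(p.drive)     # \\a\b   (the UNC share acts as the drive)
    print(p.root)      # \
    print(p.parts)     # ('\\\\a\\b\\', 'c')
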
diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py
index 8bf9b20ce9..6a44c22585 100644
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -3,12 +3,15 @@ import sys
import builtins
import contextlib
import difflib
+import importlib.util
import inspect
import pydoc
+import py_compile
import keyword
import _pickle
import pkgutil
import re
+import stat
import string
import test.support
import time
@@ -32,6 +35,10 @@ try:
except ImportError:
threading = None
+class nonascii:
+ 'Це не латиниця'
+ pass
+
if test.support.HAVE_DOCSTRINGS:
expected_data_docstrings = (
'dictionary for instance variables (if defined)',
@@ -471,6 +478,11 @@ class PydocDocTest(unittest.TestCase):
self.assertEqual(expected, result,
"documentation for missing module found")
+ def test_not_ascii(self):
+ result = run_pydoc('test.test_pydoc.nonascii', PYTHONIOENCODING='ascii')
+ encoded = nonascii.__doc__.encode('ascii', 'backslashreplace')
+ self.assertIn(encoded, result)
+
def test_input_strip(self):
missing_module = " test.i_am_not_here "
result = str(run_pydoc(missing_module), 'ascii')
@@ -557,6 +569,18 @@ class PydocDocTest(unittest.TestCase):
self.assertEqual(synopsis, expected)
+ def test_synopsis_sourceless_empty_doc(self):
+ with test.support.temp_cwd() as test_dir:
+ init_path = os.path.join(test_dir, 'foomod42.py')
+ cached_path = importlib.util.cache_from_source(init_path)
+ with open(init_path, 'w') as fobj:
+ fobj.write("foo = 1")
+ py_compile.compile(init_path)
+ synopsis = pydoc.synopsis(init_path, {})
+ self.assertIsNone(synopsis)
+ synopsis_cached = pydoc.synopsis(cached_path, {})
+ self.assertIsNone(synopsis_cached)
+
def test_splitdoc_with_description(self):
example_string = "I Am A Doc\n\n\nHere is my description"
self.assertEqual(pydoc.splitdoc(example_string),
@@ -612,6 +636,7 @@ class PydocImportTest(PydocBaseTest):
def setUp(self):
self.test_dir = os.mkdir(TESTFN)
self.addCleanup(rmtree, TESTFN)
+ importlib.invalidate_caches()
def test_badimport(self):
# This tests the fix for issue 5230, where if pydoc found the module
@@ -670,6 +695,22 @@ class PydocImportTest(PydocBaseTest):
self.assertEqual(out.getvalue(), '')
self.assertEqual(err.getvalue(), '')
+ def test_apropos_empty_doc(self):
+ pkgdir = os.path.join(TESTFN, 'walkpkg')
+ os.mkdir(pkgdir)
+ self.addCleanup(rmtree, pkgdir)
+ init_path = os.path.join(pkgdir, '__init__.py')
+ with open(init_path, 'w') as fobj:
+ fobj.write("foo = 1")
+ current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode)
+ try:
+ os.chmod(pkgdir, current_mode & ~stat.S_IEXEC)
+ with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout:
+ pydoc.apropos('')
+ self.assertIn('walkpkg', stdout.getvalue())
+ finally:
+ os.chmod(pkgdir, current_mode)
+
@unittest.skip('causes undesireable side-effects (#20128)')
def test_modules(self):
# See Helper.listmodules().
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index d2547d4a96..7348af3f1a 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -557,7 +557,7 @@ class ReTests(unittest.TestCase):
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
- def test_non_consuming(self):
+ def test_lookahead(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
@@ -571,6 +571,37 @@ class ReTests(unittest.TestCase):
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
+ # Group reference.
+ self.assertTrue(re.match(r'(a)b(?=\1)a', 'aba'))
+ self.assertIsNone(re.match(r'(a)b(?=\1)c', 'abac'))
+ # Named group reference.
+ self.assertTrue(re.match(r'(?P<g>a)b(?=(?P=g))a', 'aba'))
+ self.assertIsNone(re.match(r'(?P<g>a)b(?=(?P=g))c', 'abac'))
+ # Conditional group reference.
+ self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
+ self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(2)c|x))c', 'abc'))
+ self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(2)x|c))c', 'abc'))
+ self.assertIsNone(re.match(r'(?:(a)|(x))b(?=(?(1)b|x))c', 'abc'))
+ self.assertTrue(re.match(r'(?:(a)|(x))b(?=(?(1)c|x))c', 'abc'))
+ # Group used before defined.
+ self.assertTrue(re.match(r'(a)b(?=(?(2)x|c))(c)', 'abc'))
+ self.assertIsNone(re.match(r'(a)b(?=(?(2)b|x))(c)', 'abc'))
+ self.assertTrue(re.match(r'(a)b(?=(?(1)c|x))(c)', 'abc'))
+
+ def test_lookbehind(self):
+ self.assertTrue(re.match(r'ab(?<=b)c', 'abc'))
+ self.assertIsNone(re.match(r'ab(?<=c)c', 'abc'))
+ self.assertIsNone(re.match(r'ab(?<!b)c', 'abc'))
+ self.assertTrue(re.match(r'ab(?<!c)c', 'abc'))
+ # Group reference.
+ self.assertWarns(RuntimeWarning, re.compile, r'(a)a(?<=\1)c')
+ # Named group reference.
+ self.assertWarns(RuntimeWarning, re.compile, r'(?P<g>a)a(?<=(?P=g))c')
+ # Conditional group reference.
+ self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(1)b|x))c')
+ # Group used before defined.
+ self.assertWarns(RuntimeWarning, re.compile, r'(a)b(?<=(?(2)b|x))(c)')
+
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC")
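
The added assertions exercise group references inside lookaround assertions: a lookahead may refer back to an already-matched group, while patterns with group references inside a lookbehind are expected to raise a RuntimeWarning at compile time. The lookahead half in isolation:

    import re

    print(bool(re.match(r'(a)b(?=\1)a', 'aba')))   # True: \1 == 'a' follows 'b'
    print(re.match(r'(a)b(?=\1)c', 'abac'))        # None: lookahead wants 'a', sees 'c'
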
diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py
index f1da21ed2b..758a481fe7 100644
--- a/Lib/test/test_statistics.py
+++ b/Lib/test/test_statistics.py
@@ -9,7 +9,6 @@ import doctest
import math
import random
import sys
-import types
import unittest
from decimal import Decimal
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 538111564a..08af71faad 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -1922,7 +1922,7 @@ class POSIXProcessTestCase(BaseTestCase):
open_fds = set(fds)
# add a bunch more fds
for _ in range(9):
- fd = os.open("/dev/null", os.O_RDONLY)
+ fd = os.open(os.devnull, os.O_RDONLY)
self.addCleanup(os.close, fd)
open_fds.add(fd)
@@ -1984,7 +1984,7 @@ class POSIXProcessTestCase(BaseTestCase):
open_fds = set()
# Add a bunch more fds to pass down.
for _ in range(40):
- fd = os.open("/dev/null", os.O_RDONLY)
+ fd = os.open(os.devnull, os.O_RDONLY)
open_fds.add(fd)
# Leave a two pairs of low ones available for use by the
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index e527e403fa..c135304f40 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -1994,6 +1994,21 @@ class CommandLineTest(unittest.TestCase):
finally:
support.unlink(tar_name)
+ def test_create_command_compressed(self):
+ files = [support.findfile('tokenize_tests.txt'),
+ support.findfile('tokenize_tests-no-coding-cookie-'
+ 'and-utf8-bom-sig-only.txt')]
+ for filetype in (GzipTest, Bz2Test, LzmaTest):
+ if not filetype.open:
+ continue
+ try:
+ tar_name = tmpname + '.' + filetype.suffix
+ out = self.tarfilecmd('-c', tar_name, *files)
+ with filetype.taropen(tar_name) as tar:
+ tar.getmembers()
+ finally:
+ support.unlink(tar_name)
+
def test_extract_command(self):
self.make_simple_tarfile(tmpname)
for opt in '-e', '--extract':
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index 76e32fbfbd..3d8f9bc9c7 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -648,7 +648,14 @@ class PyZipFileTests(unittest.TestCase):
if name + 'o' not in namelist:
self.assertIn(name + 'c', namelist)
+ def requiresWriteAccess(self, path):
+ # effective_ids unavailable on windows
+ if not os.access(path, os.W_OK,
+ effective_ids=os.access in os.supports_effective_ids):
+ self.skipTest('requires write access to the installed location')
+
def test_write_pyfile(self):
+ self.requiresWriteAccess(os.path.dirname(__file__))
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
fn = __file__
if fn.endswith('.pyc') or fn.endswith('.pyo'):
@@ -680,6 +687,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_python_package(self):
import email
packagedir = os.path.dirname(email.__file__)
+ self.requiresWriteAccess(packagedir)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
zipfp.writepy(packagedir)
@@ -693,6 +701,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_filtered_python_package(self):
import test
packagedir = os.path.dirname(test.__file__)
+ self.requiresWriteAccess(packagedir)
with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp:
@@ -721,6 +730,7 @@ class PyZipFileTests(unittest.TestCase):
def test_write_with_optimization(self):
import email
packagedir = os.path.dirname(email.__file__)
+ self.requiresWriteAccess(packagedir)
# use .pyc if running test in optimization mode,
# use .pyo if running test in debug mode
optlevel = 1 if __debug__ else 0
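
requiresWriteAccess skips these tests when the installed package directory is not writable; the effective_ids flag asks os.access to use the effective rather than the real user/group ids where the platform supports it (it is unavailable on Windows, hence the feature check). The same probe outside the test class:

    import os

    path = os.path.dirname(os.__file__)          # some installed location
    writable = os.access(path, os.W_OK,
                         effective_ids=os.access in os.supports_effective_ids)
    print('writable:', writable)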