Mirror of https://github.com/jart/cosmopolitan.git (synced 2025-05-22 21:32:31 +00:00)

Commit 4f41f2184d: Improve Python tree-shaking
Parent commit: 5bb2275788
169 changed files with 4182 additions and 2411 deletions
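The change repeated across the files below is mechanical: star imports, runtime-grown `__all__` lists, and `globals()` tricks are replaced with explicit imports and plain module-level assignments, so a static tree-shaker can see exactly which modules and symbols each file needs. A minimal before/after sketch of the idea, abridged to four of the names that appear in the actual codecs.py hunk:

# Before: the imported names are only known once the module executes,
# so everything _codecs exports has to be kept in the build.
from _codecs import *

# After: every dependency is spelled out and statically visible, so
# unused symbols (and sometimes whole modules) can be dropped.
from _codecs import lookup, register, utf_8_decode, utf_8_encode

The same treatment is applied to export lists: instead of extending `__all__` from another module at import time, the list is written out in full.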
third_party/python/Lib/_pyio.py (vendored, 28 lines changed)
@@ -19,7 +19,33 @@ else:
_setmode = None

import io
from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END)
from io import (SEEK_SET, SEEK_CUR, SEEK_END)

__all__ = [
'BlockingIOError',
'BufferedIOBase',
'BufferedRWPair',
'BufferedRandom',
'BufferedReader',
'BufferedWriter',
'BytesIO',
'DEFAULT_BUFFER_SIZE',
'FileIO',
'IOBase',
'IncrementalNewlineDecoder',
'OpenWrapper',
'RawIOBase',
'SEEK_CUR',
'SEEK_END',
'SEEK_SET',
'StringIO',
'TextIOBase',
'TextIOWrapper',
'UnsupportedOperation',
'_io',
'abc',
'open',
]

valid_seek_flags = {0, 1, 2} # Hardwired values
if hasattr(os, 'SEEK_HOLE') :
third_party/python/Lib/codecs.py (vendored, 5 lines changed)
@@ -11,10 +11,7 @@ import builtins, sys

### Registry and builtin stateless codec functions

try:
from _codecs import *
except ImportError as why:
raise SystemError('Failed to load the builtin codecs: %s' % why)
from _codecs import _forget_codec, ascii_decode, ascii_encode, charmap_build, charmap_decode, charmap_encode, decode, encode, escape_decode, escape_encode, latin_1_decode, latin_1_encode, lookup, lookup_error, raw_unicode_escape_decode, raw_unicode_escape_encode, readbuffer_encode, register, register_error, unicode_escape_decode, unicode_escape_encode, unicode_internal_decode, unicode_internal_encode, utf_16_be_decode, utf_16_be_encode, utf_16_decode, utf_16_encode, utf_16_ex_decode, utf_16_le_decode, utf_16_le_encode, utf_32_be_decode, utf_32_be_encode, utf_32_decode, utf_32_encode, utf_32_ex_decode, utf_32_le_decode, utf_32_le_encode, utf_7_decode, utf_7_encode, utf_8_decode, utf_8_encode

__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
"BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
third_party/python/Lib/collections/__init__.py (vendored, 14 lines changed)
@@ -15,13 +15,21 @@ list, set, and tuple.
'''

__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
'UserString', 'Counter', 'OrderedDict', 'ChainMap']
'UserString', 'Counter', 'OrderedDict', 'ChainMap',
'Awaitable', 'Coroutine',
'AsyncIterable', 'AsyncIterator', 'AsyncGenerator',
'Hashable', 'Iterable', 'Iterator', 'Generator', 'Reversible',
'Sized', 'Container', 'Callable', 'Collection',
'Set', 'MutableSet',
'Mapping', 'MutableMapping',
'MappingView', 'KeysView', 'ItemsView', 'ValuesView',
'Sequence', 'MutableSequence',
'ByteString']

# For backwards compatibility, continue to make the collections ABCs
# available through the collections module.
from _collections_abc import *
from _collections_abc import ABCMeta, AsyncGenerator, AsyncIterable, AsyncIterator, Awaitable, ByteString, Callable, Collection, Container, Coroutine, Generator, Hashable, ItemsView, Iterable, Iterator, KeysView, Mapping, MappingView, MutableMapping, MutableSequence, MutableSet, Reversible, Sequence, Set, Sized, ValuesView, _check_methods, abstractmethod, async_generator, bytearray_iterator, bytes_iterator, coroutine, dict_itemiterator, dict_items, dict_keyiterator, dict_keys, dict_valueiterator, dict_values, generator, list_iterator, list_reverseiterator, longrange_iterator, mappingproxy, range_iterator, set_iterator, str_iterator, sys, tuple_iterator, zip_iterator
import _collections_abc
__all__ += _collections_abc.__all__

from operator import itemgetter as _itemgetter, eq as _eq
from keyword import iskeyword as _iskeyword
third_party/python/Lib/collections/abc.py (vendored, 29 lines changed)
@@ -1,2 +1,27 @@
from _collections_abc import *
from _collections_abc import __all__
from _collections_abc import (
Awaitable,
Coroutine,
AsyncIterable,
AsyncIterator,
AsyncGenerator,
Hashable,
Iterable,
Iterator,
Generator,
Reversible,
Sized,
Container,
Callable,
Collection,
Set,
MutableSet,
Mapping,
MutableMapping,
MappingView,
KeysView,
ItemsView,
ValuesView,
Sequence,
MutableSequence,
ByteString,
)
third_party/python/Lib/decimal.py (vendored, 21 lines changed)
@@ -1,11 +1,10 @@
try:
from _decimal import *
from _decimal import __doc__
from _decimal import __version__
from _decimal import __libmpdec_version__
except ImportError:
from _pydecimal import *
from _pydecimal import __doc__
from _pydecimal import __version__
from _pydecimal import __libmpdec_version__
# try:
from _decimal import BasicContext, Clamped, Context, ConversionSyntax, Decimal, DecimalException, DecimalTuple, DefaultContext, DivisionByZero, DivisionImpossible, DivisionUndefined, ExtendedContext, FloatOperation, HAVE_THREADS, Inexact, InvalidContext, InvalidOperation, MAX_EMAX, MAX_PREC, MIN_EMIN, MIN_ETINY, Overflow, ROUND_05UP, ROUND_CEILING, ROUND_DOWN, ROUND_FLOOR, ROUND_HALF_DOWN, ROUND_HALF_EVEN, ROUND_HALF_UP, ROUND_UP, Rounded, Subnormal, Underflow, getcontext, localcontext, setcontext
from _decimal import __doc__
from _decimal import __version__
from _decimal import __libmpdec_version__
# except ImportError:
# from _pydecimal import *
# from _pydecimal import __doc__
# from _pydecimal import __version__
# from _pydecimal import __libmpdec_version__
third_party/python/Lib/difflib.py (vendored, 9 lines changed)
@@ -2090,8 +2090,13 @@ def restore(delta, which):
yield line[2:]

def _test():
import doctest, difflib
return doctest.testmod(difflib)
import sys
try:
import doctest, difflib
except ImportError:
sys.exit(1)
print(doctest.testmod(difflib))

if __name__ == "__main__":
_test()
third_party/python/Lib/dis.py (vendored, 7 lines changed)
@@ -6,12 +6,13 @@ import collections
import io

from opcode import *
from opcode import __all__ as _opcodes_all

__all__ = ["code_info", "dis", "disassemble", "distb", "disco",
"findlinestarts", "findlabels", "show_code",
"get_instructions", "Instruction", "Bytecode"] + _opcodes_all
del _opcodes_all
"get_instructions", "Instruction", "Bytecode",
"cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
"haslocal", "hascompare", "hasfree", "opname", "opmap",
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs", 'stack_effect']

_have_code = (types.MethodType, types.FunctionType, types.CodeType,
classmethod, staticmethod, type)
third_party/python/Lib/distutils/util.py (vendored, 5 lines changed)
@@ -488,7 +488,10 @@ def run_2to3(files, fixer_names=None, options=None, explicit=None):
return

# Make this class local, to delay import of 2to3
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
try:
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
except ImportError:
raise
class DistutilsRefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
third_party/python/Lib/dummy_threading.py (vendored, 24 lines changed)
@@ -76,3 +76,27 @@ finally:

del _dummy_thread
del sys_modules

Barrier = Barrier
BoundedSemaphore = BoundedSemaphore
BrokenBarrierError = BrokenBarrierError
Condition = Condition
Event = Event
Lock = Lock
RLock = RLock
Semaphore = Semaphore
TIMEOUT_MAX = TIMEOUT_MAX
Thread = Thread
ThreadError = ThreadError
Timer = Timer
WeakSet = WeakSet
activeCount = activeCount
active_count = active_count
currentThread = currentThread
current_thread = current_thread
get_ident = get_ident
local = local
main_thread = main_thread
setprofile = setprofile
settrace = settrace
stack_size = stack_size
third_party/python/Lib/email/message.py (vendored, 2 lines changed)
@@ -17,6 +17,7 @@ from email import errors
from email._policybase import Policy, compat32
from email import charset as _charset
from email._encoded_words import decode_b
from email.iterators import walk
Charset = _charset.Charset

SEMISPACE = '; '
@@ -939,7 +940,6 @@ class Message:
return c_d

# I.e. def walk(self): ...
from email.iterators import walk

class MIMEPart(Message):
third_party/python/Lib/encodings/__init__.py (vendored, 13 lines changed)
@@ -154,16 +154,3 @@ def search_function(encoding):

# Register the search_function in the Python codec registry
codecs.register(search_function)

if sys.platform == 'win32':
def _alias_mbcs(encoding):
try:
import _bootlocale
if encoding == _bootlocale.getpreferredencoding(False):
import encodings.mbcs
return encodings.mbcs.getregentry()
except ImportError:
# Imports may fail while we are shutting down
pass

codecs.register(_alias_mbcs)
third_party/python/Lib/ensurepip/__init__.py (vendored, 205 lines changed)
@@ -1,205 +0,0 @@
|
|||
import os
|
||||
import os.path
|
||||
import pkgutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
__all__ = ["version", "bootstrap"]
|
||||
|
||||
|
||||
_SETUPTOOLS_VERSION = "40.6.2"
|
||||
|
||||
_PIP_VERSION = "18.1"
|
||||
|
||||
_PROJECTS = [
|
||||
("setuptools", _SETUPTOOLS_VERSION),
|
||||
("pip", _PIP_VERSION),
|
||||
]
|
||||
|
||||
|
||||
def _run_pip(args, additional_paths=None):
|
||||
# Add our bundled software to the sys.path so we can import it
|
||||
if additional_paths is not None:
|
||||
sys.path = additional_paths + sys.path
|
||||
|
||||
# Install the bundled software
|
||||
import pip._internal
|
||||
return pip._internal.main(args)
|
||||
|
||||
|
||||
def version():
|
||||
"""
|
||||
Returns a string specifying the bundled version of pip.
|
||||
"""
|
||||
return _PIP_VERSION
|
||||
|
||||
def _disable_pip_configuration_settings():
|
||||
# We deliberately ignore all pip environment variables
|
||||
# when invoking pip
|
||||
# See http://bugs.python.org/issue19734 for details
|
||||
keys_to_remove = [k for k in os.environ if k.startswith("PIP_")]
|
||||
for k in keys_to_remove:
|
||||
del os.environ[k]
|
||||
# We also ignore the settings in the default pip configuration file
|
||||
# See http://bugs.python.org/issue20053 for details
|
||||
os.environ['PIP_CONFIG_FILE'] = os.devnull
|
||||
|
||||
|
||||
def bootstrap(*, root=None, upgrade=False, user=False,
|
||||
altinstall=False, default_pip=False,
|
||||
verbosity=0):
|
||||
"""
|
||||
Bootstrap pip into the current Python installation (or the given root
|
||||
directory).
|
||||
|
||||
Note that calling this function will alter both sys.path and os.environ.
|
||||
"""
|
||||
# Discard the return value
|
||||
_bootstrap(root=root, upgrade=upgrade, user=user,
|
||||
altinstall=altinstall, default_pip=default_pip,
|
||||
verbosity=verbosity)
|
||||
|
||||
|
||||
def _bootstrap(*, root=None, upgrade=False, user=False,
|
||||
altinstall=False, default_pip=False,
|
||||
verbosity=0):
|
||||
"""
|
||||
Bootstrap pip into the current Python installation (or the given root
|
||||
directory). Returns pip command status code.
|
||||
|
||||
Note that calling this function will alter both sys.path and os.environ.
|
||||
"""
|
||||
if altinstall and default_pip:
|
||||
raise ValueError("Cannot use altinstall and default_pip together")
|
||||
|
||||
_disable_pip_configuration_settings()
|
||||
|
||||
# By default, installing pip and setuptools installs all of the
|
||||
# following scripts (X.Y == running Python version):
|
||||
#
|
||||
# pip, pipX, pipX.Y, easy_install, easy_install-X.Y
|
||||
#
|
||||
# pip 1.5+ allows ensurepip to request that some of those be left out
|
||||
if altinstall:
|
||||
# omit pip, pipX and easy_install
|
||||
os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
|
||||
elif not default_pip:
|
||||
# omit pip and easy_install
|
||||
os.environ["ENSUREPIP_OPTIONS"] = "install"
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Put our bundled wheels into a temporary directory and construct the
|
||||
# additional paths that need added to sys.path
|
||||
additional_paths = []
|
||||
for project, version in _PROJECTS:
|
||||
wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
whl = pkgutil.get_data(
|
||||
"ensurepip",
|
||||
"_bundled/{}".format(wheel_name),
|
||||
)
|
||||
with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||
fp.write(whl)
|
||||
|
||||
additional_paths.append(os.path.join(tmpdir, wheel_name))
|
||||
|
||||
# Construct the arguments to be passed to the pip command
|
||||
args = ["install", "--no-index", "--find-links", tmpdir]
|
||||
if root:
|
||||
args += ["--root", root]
|
||||
if upgrade:
|
||||
args += ["--upgrade"]
|
||||
if user:
|
||||
args += ["--user"]
|
||||
if verbosity:
|
||||
args += ["-" + "v" * verbosity]
|
||||
|
||||
return _run_pip(args + [p[0] for p in _PROJECTS], additional_paths)
|
||||
|
||||
def _uninstall_helper(*, verbosity=0):
|
||||
"""Helper to support a clean default uninstall process on Windows
|
||||
|
||||
Note that calling this function may alter os.environ.
|
||||
"""
|
||||
# Nothing to do if pip was never installed, or has been removed
|
||||
try:
|
||||
import pip
|
||||
except ImportError:
|
||||
return
|
||||
|
||||
# If the pip version doesn't match the bundled one, leave it alone
|
||||
if pip.__version__ != _PIP_VERSION:
|
||||
msg = ("ensurepip will only uninstall a matching version "
|
||||
"({!r} installed, {!r} bundled)")
|
||||
print(msg.format(pip.__version__, _PIP_VERSION), file=sys.stderr)
|
||||
return
|
||||
|
||||
_disable_pip_configuration_settings()
|
||||
|
||||
# Construct the arguments to be passed to the pip command
|
||||
args = ["uninstall", "-y", "--disable-pip-version-check"]
|
||||
if verbosity:
|
||||
args += ["-" + "v" * verbosity]
|
||||
|
||||
return _run_pip(args + [p[0] for p in reversed(_PROJECTS)])
|
||||
|
||||
|
||||
def _main(argv=None):
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser(prog="python -m ensurepip")
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="version",
|
||||
version="pip {}".format(version()),
|
||||
help="Show the version of pip that is bundled with this Python.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v", "--verbose",
|
||||
action="count",
|
||||
default=0,
|
||||
dest="verbosity",
|
||||
help=("Give more output. Option is additive, and can be used up to 3 "
|
||||
"times."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"-U", "--upgrade",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Upgrade pip and dependencies, even if already installed.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--user",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Install using the user scheme.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--root",
|
||||
default=None,
|
||||
help="Install everything relative to this alternate root directory.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--altinstall",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=("Make an alternate install, installing only the X.Y versioned "
|
||||
"scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--default-pip",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=("Make a default pip install, installing the unqualified pip "
|
||||
"and easy_install in addition to the versioned scripts."),
|
||||
)
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
return _bootstrap(
|
||||
root=args.root,
|
||||
upgrade=args.upgrade,
|
||||
user=args.user,
|
||||
verbosity=args.verbosity,
|
||||
altinstall=args.altinstall,
|
||||
default_pip=args.default_pip,
|
||||
)
|
third_party/python/Lib/ensurepip/__main__.py (vendored, 5 lines changed)
@@ -1,5 +0,0 @@
import ensurepip
import sys

if __name__ == "__main__":
sys.exit(ensurepip._main())
Binary file not shown. (2 files: the bundled wheels under third_party/python/Lib/ensurepip/_bundled/, deleted along with ensurepip)
third_party/python/Lib/ensurepip/_uninstall.py (vendored, 31 lines changed)
@@ -1,31 +0,0 @@
|
|||
"""Basic pip uninstallation support, helper for the Windows uninstaller"""
|
||||
|
||||
import argparse
|
||||
import ensurepip
|
||||
import sys
|
||||
|
||||
|
||||
def _main(argv=None):
|
||||
parser = argparse.ArgumentParser(prog="python -m ensurepip._uninstall")
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="version",
|
||||
version="pip {}".format(ensurepip.version()),
|
||||
help="Show the version of pip this will attempt to uninstall.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v", "--verbose",
|
||||
action="count",
|
||||
default=0,
|
||||
dest="verbosity",
|
||||
help=("Give more output. Option is additive, and can be used up to 3 "
|
||||
"times."),
|
||||
)
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
return ensurepip._uninstall_helper(verbosity=args.verbosity)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(_main())
|
third_party/python/Lib/hashlib.py (vendored, 23 lines changed)
@@ -66,7 +66,6 @@ algorithms_available = set(__always_supported)
|
|||
__all__ = __always_supported + ('new', 'algorithms_guaranteed',
|
||||
'algorithms_available', 'pbkdf2_hmac')
|
||||
|
||||
|
||||
__builtin_constructor_cache = {}
|
||||
|
||||
def __get_builtin_constructor(name):
|
||||
|
@@ -223,16 +222,20 @@ except ImportError:
|
|||
pass
|
||||
|
||||
|
||||
for __func_name in __always_supported:
|
||||
# try them all, some may not work due to the OpenSSL
|
||||
# version not supporting that algorithm.
|
||||
try:
|
||||
globals()[__func_name] = __get_hash(__func_name)
|
||||
except ValueError:
|
||||
import logging
|
||||
logging.exception('code for hash %s was not found.', __func_name)
|
||||
md5 = __get_hash('md5')
|
||||
sha1 = __get_hash('sha1')
|
||||
sha224 = __get_hash('sha224')
|
||||
sha256 = __get_hash('sha256')
|
||||
sha384 = __get_hash('sha384')
|
||||
sha512 = __get_hash('sha512')
|
||||
sha3_224 = __get_hash('sha3_224')
|
||||
sha3_256 = __get_hash('sha3_256')
|
||||
sha3_384 = __get_hash('sha3_384')
|
||||
sha3_512 = __get_hash('sha3_512')
|
||||
shake_128 = __get_hash('shake_128')
|
||||
shake_256 = __get_hash('shake_256')
|
||||
|
||||
|
||||
# Cleanup locals()
|
||||
del __always_supported, __func_name, __get_hash
|
||||
del __always_supported, __get_hash
|
||||
del __py_new, __hash_new, __get_openssl_constructor
|
||||
|
|
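The hashlib.py hunk above is a variant of the same pattern: the loop that populated the module namespace through `globals()` becomes one explicit assignment per algorithm. Abridged sketch (the actual hunk assigns all twelve constructors, from md5 through shake_256):

# Before: constructors only appear in the namespace after this loop runs,
# so nothing about them can be determined statically.
for __func_name in __always_supported:
    globals()[__func_name] = __get_hash(__func_name)

# After: each constructor is an ordinary, named module attribute.
md5 = __get_hash('md5')
sha256 = __get_hash('sha256')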
third_party/python/Lib/heapq.py (vendored, 8 lines changed)
@@ -600,8 +600,10 @@ try:
except ImportError:
pass

if __name__ == "__main__":

import doctest
import sys
try:
import doctest
except ImportError:
sys.exit(1)
print(doctest.testmod())
third_party/python/Lib/http/client.py (vendored, 1 line changed)
@@ -77,6 +77,7 @@ import re
import socket
import collections
from urllib.parse import urlsplit
from encodings import idna, iso8859_1

# HTTPMessage, parse_headers(), and the HTTP status code constants are
# intentionally omitted for simplicity
third_party/python/Lib/http/server.py (vendored, 1 line changed)
@@ -105,6 +105,7 @@ import copy
import argparse

from http import HTTPStatus
from encodings import idna, iso8859_1

# Default error message template
third_party/python/Lib/inspect.py (vendored, 25 lines changed)
@@ -33,7 +33,6 @@ __author__ = ('Ka-Ping Yee <ping@lfw.org>',
|
|||
|
||||
import abc
|
||||
import ast
|
||||
import dis
|
||||
import collections.abc
|
||||
import enum
|
||||
import importlib.machinery
|
||||
|
@@ -51,11 +50,17 @@ import builtins
|
|||
from operator import attrgetter
|
||||
from collections import namedtuple, OrderedDict
|
||||
|
||||
# Create constants for the compiler flags in Include/code.h
|
||||
# We try to get them from dis to avoid duplication
|
||||
mod_dict = globals()
|
||||
for k, v in dis.COMPILER_FLAG_NAMES.items():
|
||||
mod_dict["CO_" + v] = k
|
||||
# dis.COMPILER_FLAG_NAMES
|
||||
CO_OPTIMIZED = 1
|
||||
CO_NEWLOCALS = 2
|
||||
CO_VARARGS = 4
|
||||
CO_VARKEYWORDS = 8
|
||||
CO_NESTED = 16
|
||||
CO_GENERATOR = 32
|
||||
CO_NOFREE = 64
|
||||
CO_COROUTINE = 128
|
||||
CO_ITERABLE_COROUTINE = 256
|
||||
CO_ASYNC_GENERATOR = 512
|
||||
|
||||
# See Include/object.h
|
||||
TPFLAGS_IS_ABSTRACT = 1 << 20
|
||||
|
@@ -3067,9 +3072,11 @@ def signature(obj, *, follow_wrapped=True):
|
|||
|
||||
def _main():
|
||||
""" Logic for inspecting an object given at command line """
|
||||
import argparse
|
||||
import importlib
|
||||
|
||||
try:
|
||||
import argparse
|
||||
import importlib
|
||||
except ImportError:
|
||||
sys.exit(1)
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'object',
|
||||
|
|
|
third_party/python/Lib/multiprocessing/__init__.py (vendored; path inferred from the surrounding context)
@@ -19,9 +19,42 @@ from . import context
|
|||
# Copy stuff from default context
|
||||
#
|
||||
|
||||
globals().update((name, getattr(context._default_context, name))
|
||||
for name in context._default_context.__all__)
|
||||
__all__ = context._default_context.__all__
|
||||
Array = context._default_context.Array
|
||||
AuthenticationError = context._default_context.AuthenticationError
|
||||
Barrier = context._default_context.Barrier
|
||||
BoundedSemaphore = context._default_context.BoundedSemaphore
|
||||
BufferTooShort = context._default_context.BufferTooShort
|
||||
Condition = context._default_context.Condition
|
||||
Event = context._default_context.Event
|
||||
JoinableQueue = context._default_context.JoinableQueue
|
||||
Lock = context._default_context.Lock
|
||||
Manager = context._default_context.Manager
|
||||
Pipe = context._default_context.Pipe
|
||||
Pool = context._default_context.Pool
|
||||
Process = context._default_context.Process
|
||||
ProcessError = context._default_context.ProcessError
|
||||
Queue = context._default_context.Queue
|
||||
RLock = context._default_context.RLock
|
||||
RawArray = context._default_context.RawArray
|
||||
RawValue = context._default_context.RawValue
|
||||
Semaphore = context._default_context.Semaphore
|
||||
SimpleQueue = context._default_context.SimpleQueue
|
||||
TimeoutError = context._default_context.TimeoutError
|
||||
Value = context._default_context.Value
|
||||
active_children = context._default_context.active_children
|
||||
allow_connection_pickling = context._default_context.allow_connection_pickling
|
||||
cpu_count = context._default_context.cpu_count
|
||||
current_process = context._default_context.current_process
|
||||
freeze_support = context._default_context.freeze_support
|
||||
get_all_start_methods = context._default_context.get_all_start_methods
|
||||
get_context = context._default_context.get_context
|
||||
get_logger = context._default_context.get_logger
|
||||
get_start_method = context._default_context.get_start_method
|
||||
log_to_stderr = context._default_context.log_to_stderr
|
||||
reducer = context._default_context.reducer
|
||||
set_executable = context._default_context.set_executable
|
||||
set_forkserver_preload = context._default_context.set_forkserver_preload
|
||||
set_start_method = context._default_context.set_start_method
|
||||
|
||||
#
|
||||
# XXX These should not really be documented or public.
|
||||
|
|
third_party/python/Lib/multiprocessing/context.py (vendored, 100 lines changed)
@@ -120,23 +120,23 @@ class BaseContext(object):
|
|||
|
||||
def RawValue(self, typecode_or_type, *args):
|
||||
'''Returns a shared object'''
|
||||
from .sharedctypes import RawValue
|
||||
# from .sharedctypes import RawValue
|
||||
return RawValue(typecode_or_type, *args)
|
||||
|
||||
def RawArray(self, typecode_or_type, size_or_initializer):
|
||||
'''Returns a shared array'''
|
||||
from .sharedctypes import RawArray
|
||||
# from .sharedctypes import RawArray
|
||||
return RawArray(typecode_or_type, size_or_initializer)
|
||||
|
||||
def Value(self, typecode_or_type, *args, lock=True):
|
||||
'''Returns a synchronized shared object'''
|
||||
from .sharedctypes import Value
|
||||
# from .sharedctypes import Value
|
||||
return Value(typecode_or_type, *args, lock=lock,
|
||||
ctx=self.get_context())
|
||||
|
||||
def Array(self, typecode_or_type, size_or_initializer, *, lock=True):
|
||||
'''Returns a synchronized shared array'''
|
||||
from .sharedctypes import Array
|
||||
# from .sharedctypes import Array
|
||||
return Array(typecode_or_type, size_or_initializer, lock=lock,
|
||||
ctx=self.get_context())
|
||||
|
||||
|
@@ -267,68 +267,48 @@ DefaultContext.__all__ = list(x for x in dir(DefaultContext) if x[0] != '_')
|
|||
# Context types for fixed start method
|
||||
#
|
||||
|
||||
if sys.platform != 'win32':
|
||||
class ForkProcess(process.BaseProcess):
|
||||
_start_method = 'fork'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_fork import Popen
|
||||
return Popen(process_obj)
|
||||
|
||||
class ForkProcess(process.BaseProcess):
|
||||
_start_method = 'fork'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_fork import Popen
|
||||
return Popen(process_obj)
|
||||
class SpawnProcess(process.BaseProcess):
|
||||
_start_method = 'spawn'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_spawn_posix import Popen
|
||||
return Popen(process_obj)
|
||||
|
||||
class SpawnProcess(process.BaseProcess):
|
||||
_start_method = 'spawn'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_spawn_posix import Popen
|
||||
return Popen(process_obj)
|
||||
class ForkServerProcess(process.BaseProcess):
|
||||
_start_method = 'forkserver'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_forkserver import Popen
|
||||
return Popen(process_obj)
|
||||
|
||||
class ForkServerProcess(process.BaseProcess):
|
||||
_start_method = 'forkserver'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_forkserver import Popen
|
||||
return Popen(process_obj)
|
||||
class ForkContext(BaseContext):
|
||||
_name = 'fork'
|
||||
Process = ForkProcess
|
||||
|
||||
class ForkContext(BaseContext):
|
||||
_name = 'fork'
|
||||
Process = ForkProcess
|
||||
class SpawnContext(BaseContext):
|
||||
_name = 'spawn'
|
||||
Process = SpawnProcess
|
||||
|
||||
class SpawnContext(BaseContext):
|
||||
_name = 'spawn'
|
||||
Process = SpawnProcess
|
||||
class ForkServerContext(BaseContext):
|
||||
_name = 'forkserver'
|
||||
Process = ForkServerProcess
|
||||
def _check_available(self):
|
||||
if not reduction.HAVE_SEND_HANDLE:
|
||||
raise ValueError('forkserver start method not available')
|
||||
|
||||
class ForkServerContext(BaseContext):
|
||||
_name = 'forkserver'
|
||||
Process = ForkServerProcess
|
||||
def _check_available(self):
|
||||
if not reduction.HAVE_SEND_HANDLE:
|
||||
raise ValueError('forkserver start method not available')
|
||||
|
||||
_concrete_contexts = {
|
||||
'fork': ForkContext(),
|
||||
'spawn': SpawnContext(),
|
||||
'forkserver': ForkServerContext(),
|
||||
}
|
||||
_default_context = DefaultContext(_concrete_contexts['fork'])
|
||||
|
||||
else:
|
||||
|
||||
class SpawnProcess(process.BaseProcess):
|
||||
_start_method = 'spawn'
|
||||
@staticmethod
|
||||
def _Popen(process_obj):
|
||||
from .popen_spawn_win32 import Popen
|
||||
return Popen(process_obj)
|
||||
|
||||
class SpawnContext(BaseContext):
|
||||
_name = 'spawn'
|
||||
Process = SpawnProcess
|
||||
|
||||
_concrete_contexts = {
|
||||
'spawn': SpawnContext(),
|
||||
}
|
||||
_default_context = DefaultContext(_concrete_contexts['spawn'])
|
||||
_concrete_contexts = {
|
||||
'fork': ForkContext(),
|
||||
'spawn': SpawnContext(),
|
||||
'forkserver': ForkServerContext(),
|
||||
}
|
||||
_default_context = DefaultContext(_concrete_contexts['fork'])
|
||||
|
||||
#
|
||||
# Force the start method
|
||||
|
|
third_party/python/Lib/opcode.py (vendored, 10 lines changed)
@@ -6,7 +6,7 @@ operate on bytecodes (e.g. peephole optimizers).

__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
"haslocal", "hascompare", "hasfree", "opname", "opmap",
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"]
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs", 'stack_effect']

# It's a chicken-and-egg I'm afraid:
# We're imported before _opcode's made.
@@ -15,14 +15,10 @@ __all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
# Both our chickens and eggs are allayed.
# --Larry Hastings, 2013/11/23

try:
from _opcode import stack_effect
__all__.append('stack_effect')
except ImportError:
pass
from _opcode import stack_effect

cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is',
'is not', 'exception match', 'BAD')
'is not', 'exception match', 'BAD')

hasconst = []
hasname = []
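A second recurring move, visible in the opcode.py hunk just above and mirrored in dis.py, is to make conditional imports unconditional and to freeze `__all__` rather than growing it at runtime. Sketch of the shape of the change (the export list is trimmed here to a few names):

# Before: whether stack_effect is imported and exported depends on
# what happens at import time.
try:
    from _opcode import stack_effect
    __all__.append('stack_effect')
except ImportError:
    pass

# After: both the dependency on _opcode and the export list are static.
from _opcode import stack_effect
__all__ = ['cmp_op', 'opname', 'opmap', 'stack_effect']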
third_party/python/Lib/os.py (vendored, 426 lines changed)
@@ -32,7 +32,7 @@ _names = sys.builtin_module_names
|
|||
__all__ = ["altsep", "curdir", "pardir", "sep", "pathsep", "linesep",
|
||||
"defpath", "name", "path", "devnull", "SEEK_SET", "SEEK_CUR",
|
||||
"SEEK_END", "fsencode", "fsdecode", "get_exec_path", "fdopen",
|
||||
"popen", "extsep"]
|
||||
"popen", "extsep", "_exit"]
|
||||
|
||||
def _exists(name):
|
||||
return name in globals()
|
||||
|
@@ -45,48 +45,235 @@ def _get_exports_list(module):
|
|||
|
||||
# Any new dependencies of the os module and/or changes in path separator
|
||||
# requires updating importlib as well.
|
||||
if 'posix' in _names:
|
||||
name = 'posix'
|
||||
linesep = '\n'
|
||||
from posix import *
|
||||
try:
|
||||
from posix import _exit
|
||||
__all__.append('_exit')
|
||||
except ImportError:
|
||||
pass
|
||||
import posixpath as path
|
||||
name = 'posix'
|
||||
linesep = '\n'
|
||||
|
||||
try:
|
||||
from posix import _have_functions
|
||||
except ImportError:
|
||||
pass
|
||||
import posixpath as path
|
||||
import posix
|
||||
|
||||
import posix
|
||||
__all__.extend(_get_exports_list(posix))
|
||||
del posix
|
||||
CLD_CONTINUED = posix.CLD_CONTINUED
|
||||
CLD_DUMPED = posix.CLD_DUMPED
|
||||
CLD_EXITED = posix.CLD_EXITED
|
||||
CLD_TRAPPED = posix.CLD_TRAPPED
|
||||
DirEntry = posix.DirEntry
|
||||
EX_CANTCREAT = posix.EX_CANTCREAT
|
||||
EX_CONFIG = posix.EX_CONFIG
|
||||
EX_DATAERR = posix.EX_DATAERR
|
||||
EX_IOERR = posix.EX_IOERR
|
||||
EX_NOHOST = posix.EX_NOHOST
|
||||
EX_NOINPUT = posix.EX_NOINPUT
|
||||
EX_NOPERM = posix.EX_NOPERM
|
||||
EX_NOUSER = posix.EX_NOUSER
|
||||
EX_OK = posix.EX_OK
|
||||
EX_OSERR = posix.EX_OSERR
|
||||
EX_OSFILE = posix.EX_OSFILE
|
||||
EX_PROTOCOL = posix.EX_PROTOCOL
|
||||
EX_SOFTWARE = posix.EX_SOFTWARE
|
||||
EX_TEMPFAIL = posix.EX_TEMPFAIL
|
||||
EX_UNAVAILABLE = posix.EX_UNAVAILABLE
|
||||
EX_USAGE = posix.EX_USAGE
|
||||
F_LOCK = posix.F_LOCK
|
||||
F_OK = posix.F_OK
|
||||
F_TEST = posix.F_TEST
|
||||
F_TLOCK = posix.F_TLOCK
|
||||
F_ULOCK = posix.F_ULOCK
|
||||
GRND_NONBLOCK = posix.GRND_NONBLOCK
|
||||
GRND_NORDRND = posix.GRND_NORDRND
|
||||
GRND_NOSYSTEM = posix.GRND_NOSYSTEM
|
||||
GRND_RANDOM = posix.GRND_RANDOM
|
||||
O_ACCMODE = posix.O_ACCMODE
|
||||
O_APPEND = posix.O_APPEND
|
||||
O_ASYNC = posix.O_ASYNC
|
||||
O_CLOEXEC = posix.O_CLOEXEC
|
||||
O_CREAT = posix.O_CREAT
|
||||
O_DIRECT = posix.O_DIRECT
|
||||
O_DIRECTORY = posix.O_DIRECTORY
|
||||
O_DSYNC = posix.O_DSYNC
|
||||
O_EXCL = posix.O_EXCL
|
||||
O_LARGEFILE = posix.O_LARGEFILE
|
||||
O_NDELAY = posix.O_NDELAY
|
||||
O_NOATIME = posix.O_NOATIME
|
||||
O_NOCTTY = posix.O_NOCTTY
|
||||
O_NOFOLLOW = posix.O_NOFOLLOW
|
||||
O_NONBLOCK = posix.O_NONBLOCK
|
||||
O_PATH = posix.O_PATH
|
||||
O_RDONLY = posix.O_RDONLY
|
||||
O_RDWR = posix.O_RDWR
|
||||
O_RSYNC = posix.O_RSYNC
|
||||
O_SYNC = posix.O_SYNC
|
||||
O_TMPFILE = posix.O_TMPFILE
|
||||
O_TRUNC = posix.O_TRUNC
|
||||
O_WRONLY = posix.O_WRONLY
|
||||
POSIX_FADV_DONTNEED = posix.POSIX_FADV_DONTNEED
|
||||
POSIX_FADV_NOREUSE = posix.POSIX_FADV_NOREUSE
|
||||
POSIX_FADV_NORMAL = posix.POSIX_FADV_NORMAL
|
||||
POSIX_FADV_RANDOM = posix.POSIX_FADV_RANDOM
|
||||
POSIX_FADV_SEQUENTIAL = posix.POSIX_FADV_SEQUENTIAL
|
||||
POSIX_FADV_WILLNEED = posix.POSIX_FADV_WILLNEED
|
||||
PRIO_PGRP = posix.PRIO_PGRP
|
||||
PRIO_PROCESS = posix.PRIO_PROCESS
|
||||
PRIO_USER = posix.PRIO_USER
|
||||
RTLD_GLOBAL = posix.RTLD_GLOBAL
|
||||
RTLD_LAZY = posix.RTLD_LAZY
|
||||
RTLD_LOCAL = posix.RTLD_LOCAL
|
||||
RTLD_NOW = posix.RTLD_NOW
|
||||
R_OK = posix.R_OK
|
||||
SCHED_BATCH = posix.SCHED_BATCH
|
||||
SCHED_FIFO = posix.SCHED_FIFO
|
||||
SCHED_IDLE = posix.SCHED_IDLE
|
||||
SCHED_OTHER = posix.SCHED_OTHER
|
||||
SCHED_RESET_ON_FORK = posix.SCHED_RESET_ON_FORK
|
||||
SCHED_RR = posix.SCHED_RR
|
||||
ST_APPEND = posix.ST_APPEND
|
||||
ST_MANDLOCK = posix.ST_MANDLOCK
|
||||
ST_NOATIME = posix.ST_NOATIME
|
||||
ST_NODEV = posix.ST_NODEV
|
||||
ST_NODIRATIME = posix.ST_NODIRATIME
|
||||
ST_NOEXEC = posix.ST_NOEXEC
|
||||
ST_NOSUID = posix.ST_NOSUID
|
||||
ST_RDONLY = posix.ST_RDONLY
|
||||
ST_RELATIME = posix.ST_RELATIME
|
||||
ST_SYNCHRONOUS = posix.ST_SYNCHRONOUS
|
||||
ST_WRITE = posix.ST_WRITE
|
||||
WCONTINUED = posix.WCONTINUED
|
||||
WCOREDUMP = posix.WCOREDUMP
|
||||
WEXITED = posix.WEXITED
|
||||
WEXITSTATUS = posix.WEXITSTATUS
|
||||
WIFCONTINUED = posix.WIFCONTINUED
|
||||
WIFEXITED = posix.WIFEXITED
|
||||
WIFSIGNALED = posix.WIFSIGNALED
|
||||
WIFSTOPPED = posix.WIFSTOPPED
|
||||
WNOHANG = posix.WNOHANG
|
||||
WNOWAIT = posix.WNOWAIT
|
||||
WSTOPPED = posix.WSTOPPED
|
||||
WSTOPSIG = posix.WSTOPSIG
|
||||
WTERMSIG = posix.WTERMSIG
|
||||
WUNTRACED = posix.WUNTRACED
|
||||
W_OK = posix.W_OK
|
||||
X_OK = posix.X_OK
|
||||
_exit = posix._exit
|
||||
_have_functions = posix._have_functions
|
||||
abort = posix.abort
|
||||
access = posix.access
|
||||
chdir = posix.chdir
|
||||
chmod = posix.chmod
|
||||
chown = posix.chown
|
||||
chroot = posix.chroot
|
||||
close = posix.close
|
||||
closerange = posix.closerange
|
||||
cpu_count = posix.cpu_count
|
||||
device_encoding = posix.device_encoding
|
||||
dup = posix.dup
|
||||
dup2 = posix.dup2
|
||||
environ = posix.environ
|
||||
error = posix.error
|
||||
execv = posix.execv
|
||||
execve = posix.execve
|
||||
fchdir = posix.fchdir
|
||||
fchmod = posix.fchmod
|
||||
fchown = posix.fchown
|
||||
fdatasync = posix.fdatasync
|
||||
fork = posix.fork
|
||||
fpathconf = posix.fpathconf
|
||||
fspath = posix.fspath
|
||||
fstat = posix.fstat
|
||||
fsync = posix.fsync
|
||||
ftruncate = posix.ftruncate
|
||||
get_blocking = posix.get_blocking
|
||||
get_inheritable = posix.get_inheritable
|
||||
get_terminal_size = posix.get_terminal_size
|
||||
getcwd = posix.getcwd
|
||||
getcwdb = posix.getcwdb
|
||||
getgrouplist = posix.getgrouplist
|
||||
getgroups = posix.getgroups
|
||||
getlogin = posix.getlogin
|
||||
getpgid = posix.getpgid
|
||||
getpgrp = posix.getpgrp
|
||||
getpid = posix.getpid
|
||||
getpriority = posix.getpriority
|
||||
getsid = posix.getsid
|
||||
getuid = posix.getuid
|
||||
initgroups = posix.initgroups
|
||||
isatty = posix.isatty
|
||||
kill = posix.kill
|
||||
killpg = posix.killpg
|
||||
lchown = posix.lchown
|
||||
link = posix.link
|
||||
listdir = posix.listdir
|
||||
lseek = posix.lseek
|
||||
lstat = posix.lstat
|
||||
major = posix.major
|
||||
makedev = posix.makedev
|
||||
minor = posix.minor
|
||||
mkdir = posix.mkdir
|
||||
mkfifo = posix.mkfifo
|
||||
mknod = posix.mknod
|
||||
nice = posix.nice
|
||||
open = posix.open
|
||||
openpty = posix.openpty
|
||||
pathconf = posix.pathconf
|
||||
pathconf_names = posix.pathconf_names
|
||||
pipe = posix.pipe
|
||||
pipe2 = posix.pipe2
|
||||
posix_fadvise = posix.posix_fadvise
|
||||
pread = posix.pread
|
||||
putenv = posix.putenv
|
||||
pwrite = posix.pwrite
|
||||
read = posix.read
|
||||
readlink = posix.readlink
|
||||
readv = posix.readv
|
||||
remove = posix.remove
|
||||
rename = posix.rename
|
||||
replace = posix.replace
|
||||
rmdir = posix.rmdir
|
||||
scandir = posix.scandir
|
||||
sched_yield = posix.sched_yield
|
||||
sendfile = posix.sendfile
|
||||
set_blocking = posix.set_blocking
|
||||
set_inheritable = posix.set_inheritable
|
||||
setegid = posix.setegid
|
||||
seteuid = posix.seteuid
|
||||
setgid = posix.setgid
|
||||
setpgid = posix.setpgid
|
||||
setpriority = posix.setpriority
|
||||
setregid = posix.setregid
|
||||
setresgid = posix.setresgid
|
||||
setresuid = posix.setresuid
|
||||
setreuid = posix.setreuid
|
||||
setsid = posix.setsid
|
||||
setuid = posix.setuid
|
||||
stat = posix.stat
|
||||
stat_float_times = posix.stat_float_times
|
||||
stat_result = posix.stat_result
|
||||
statvfs_result = posix.statvfs_result
|
||||
strerror = posix.strerror
|
||||
symlink = posix.symlink
|
||||
sync = posix.sync
|
||||
sysconf = posix.sysconf
|
||||
sysconf_names = posix.sysconf_names
|
||||
system = posix.system
|
||||
tcgetpgrp = posix.tcgetpgrp
|
||||
tcsetpgrp = posix.tcsetpgrp
|
||||
terminal_size = posix.terminal_size
|
||||
times = posix.times
|
||||
times_result = posix.times_result
|
||||
truncate = posix.truncate
|
||||
umask = posix.umask
|
||||
uname = posix.uname
|
||||
uname_result = posix.uname_result
|
||||
unlink = posix.unlink
|
||||
unsetenv = posix.unsetenv
|
||||
urandom = posix.urandom
|
||||
utime = posix.utime
|
||||
wait = posix.wait
|
||||
wait3 = posix.wait3
|
||||
wait4 = posix.wait4
|
||||
waitpid = posix.waitpid
|
||||
write = posix.write
|
||||
writev = posix.writev
|
||||
|
||||
elif 'nt' in _names:
|
||||
name = 'nt'
|
||||
linesep = '\r\n'
|
||||
from nt import *
|
||||
try:
|
||||
from nt import _exit
|
||||
__all__.append('_exit')
|
||||
except ImportError:
|
||||
pass
|
||||
import ntpath as path
|
||||
|
||||
import nt
|
||||
__all__.extend(_get_exports_list(nt))
|
||||
del nt
|
||||
|
||||
try:
|
||||
from nt import _have_functions
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
else:
|
||||
raise ImportError('no os specific module found')
|
||||
__all__.extend(_get_exports_list(posix))
|
||||
del posix
|
||||
|
||||
sys.modules['os.path'] = path
|
||||
from os.path import (curdir, pardir, sep, pathsep, defpath, extsep, altsep,
|
||||
|
@@ -95,91 +282,90 @@ from os.path import (curdir, pardir, sep, pathsep, defpath, extsep, altsep,
|
|||
del _names
|
||||
|
||||
|
||||
if _exists("_have_functions"):
|
||||
_globals = globals()
|
||||
def _add(str, fn):
|
||||
if (fn in _globals) and (str in _have_functions):
|
||||
_set.add(_globals[fn])
|
||||
_globals = globals()
|
||||
def _add(str, fn):
|
||||
if (fn in _globals) and (str in _have_functions):
|
||||
_set.add(_globals[fn])
|
||||
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
_add("HAVE_FCHMODAT", "chmod")
|
||||
_add("HAVE_FCHOWNAT", "chown")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_FUTIMESAT", "utime")
|
||||
_add("HAVE_LINKAT", "link")
|
||||
_add("HAVE_MKDIRAT", "mkdir")
|
||||
_add("HAVE_MKFIFOAT", "mkfifo")
|
||||
_add("HAVE_MKNODAT", "mknod")
|
||||
_add("HAVE_OPENAT", "open")
|
||||
_add("HAVE_READLINKAT", "readlink")
|
||||
_add("HAVE_RENAMEAT", "rename")
|
||||
_add("HAVE_SYMLINKAT", "symlink")
|
||||
_add("HAVE_UNLINKAT", "unlink")
|
||||
_add("HAVE_UNLINKAT", "rmdir")
|
||||
_add("HAVE_UTIMENSAT", "utime")
|
||||
supports_dir_fd = _set
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
_add("HAVE_FCHMODAT", "chmod")
|
||||
_add("HAVE_FCHOWNAT", "chown")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_FUTIMESAT", "utime")
|
||||
_add("HAVE_LINKAT", "link")
|
||||
_add("HAVE_MKDIRAT", "mkdir")
|
||||
_add("HAVE_MKFIFOAT", "mkfifo")
|
||||
_add("HAVE_MKNODAT", "mknod")
|
||||
_add("HAVE_OPENAT", "open")
|
||||
_add("HAVE_READLINKAT", "readlink")
|
||||
_add("HAVE_RENAMEAT", "rename")
|
||||
_add("HAVE_SYMLINKAT", "symlink")
|
||||
_add("HAVE_UNLINKAT", "unlink")
|
||||
_add("HAVE_UNLINKAT", "rmdir")
|
||||
_add("HAVE_UTIMENSAT", "utime")
|
||||
supports_dir_fd = _set
|
||||
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
supports_effective_ids = _set
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
supports_effective_ids = _set
|
||||
|
||||
_set = set()
|
||||
_add("HAVE_FCHDIR", "chdir")
|
||||
_add("HAVE_FCHMOD", "chmod")
|
||||
_add("HAVE_FCHOWN", "chown")
|
||||
_add("HAVE_FDOPENDIR", "listdir")
|
||||
_add("HAVE_FEXECVE", "execve")
|
||||
_set.add(stat) # fstat always works
|
||||
_add("HAVE_FTRUNCATE", "truncate")
|
||||
_add("HAVE_FUTIMENS", "utime")
|
||||
_add("HAVE_FUTIMES", "utime")
|
||||
_add("HAVE_FPATHCONF", "pathconf")
|
||||
if _exists("statvfs") and _exists("fstatvfs"): # mac os x10.3
|
||||
_add("HAVE_FSTATVFS", "statvfs")
|
||||
supports_fd = _set
|
||||
_set = set()
|
||||
_add("HAVE_FCHDIR", "chdir")
|
||||
_add("HAVE_FCHMOD", "chmod")
|
||||
_add("HAVE_FCHOWN", "chown")
|
||||
_add("HAVE_FDOPENDIR", "listdir")
|
||||
_add("HAVE_FEXECVE", "execve")
|
||||
_set.add(stat) # fstat always works
|
||||
_add("HAVE_FTRUNCATE", "truncate")
|
||||
_add("HAVE_FUTIMENS", "utime")
|
||||
_add("HAVE_FUTIMES", "utime")
|
||||
_add("HAVE_FPATHCONF", "pathconf")
|
||||
if _exists("statvfs") and _exists("fstatvfs"): # mac os x10.3
|
||||
_add("HAVE_FSTATVFS", "statvfs")
|
||||
supports_fd = _set
|
||||
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
# Some platforms don't support lchmod(). Often the function exists
|
||||
# anyway, as a stub that always returns ENOSUP or perhaps EOPNOTSUPP.
|
||||
# (No, I don't know why that's a good design.) ./configure will detect
|
||||
# this and reject it--so HAVE_LCHMOD still won't be defined on such
|
||||
# platforms. This is Very Helpful.
|
||||
#
|
||||
# However, sometimes platforms without a working lchmod() *do* have
|
||||
# fchmodat(). (Examples: Linux kernel 3.2 with glibc 2.15,
|
||||
# OpenIndiana 3.x.) And fchmodat() has a flag that theoretically makes
|
||||
# it behave like lchmod(). So in theory it would be a suitable
|
||||
# replacement for lchmod(). But when lchmod() doesn't work, fchmodat()'s
|
||||
# flag doesn't work *either*. Sadly ./configure isn't sophisticated
|
||||
# enough to detect this condition--it only determines whether or not
|
||||
# fchmodat() minimally works.
|
||||
#
|
||||
# Therefore we simply ignore fchmodat() when deciding whether or not
|
||||
# os.chmod supports follow_symlinks. Just checking lchmod() is
|
||||
# sufficient. After all--if you have a working fchmodat(), your
|
||||
# lchmod() almost certainly works too.
|
||||
#
|
||||
# _add("HAVE_FCHMODAT", "chmod")
|
||||
_add("HAVE_FCHOWNAT", "chown")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_LCHFLAGS", "chflags")
|
||||
_add("HAVE_LCHMOD", "chmod")
|
||||
if _exists("lchown"): # mac os x10.3
|
||||
_add("HAVE_LCHOWN", "chown")
|
||||
_add("HAVE_LINKAT", "link")
|
||||
_add("HAVE_LUTIMES", "utime")
|
||||
_add("HAVE_LSTAT", "stat")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_UTIMENSAT", "utime")
|
||||
_add("MS_WINDOWS", "stat")
|
||||
supports_follow_symlinks = _set
|
||||
_set = set()
|
||||
_add("HAVE_FACCESSAT", "access")
|
||||
# Some platforms don't support lchmod(). Often the function exists
|
||||
# anyway, as a stub that always returns ENOSUP or perhaps EOPNOTSUPP.
|
||||
# (No, I don't know why that's a good design.) ./configure will detect
|
||||
# this and reject it--so HAVE_LCHMOD still won't be defined on such
|
||||
# platforms. This is Very Helpful.
|
||||
#
|
||||
# However, sometimes platforms without a working lchmod() *do* have
|
||||
# fchmodat(). (Examples: Linux kernel 3.2 with glibc 2.15,
|
||||
# OpenIndiana 3.x.) And fchmodat() has a flag that theoretically makes
|
||||
# it behave like lchmod(). So in theory it would be a suitable
|
||||
# replacement for lchmod(). But when lchmod() doesn't work, fchmodat()'s
|
||||
# flag doesn't work *either*. Sadly ./configure isn't sophisticated
|
||||
# enough to detect this condition--it only determines whether or not
|
||||
# fchmodat() minimally works.
|
||||
#
|
||||
# Therefore we simply ignore fchmodat() when deciding whether or not
|
||||
# os.chmod supports follow_symlinks. Just checking lchmod() is
|
||||
# sufficient. After all--if you have a working fchmodat(), your
|
||||
# lchmod() almost certainly works too.
|
||||
#
|
||||
# _add("HAVE_FCHMODAT", "chmod")
|
||||
_add("HAVE_FCHOWNAT", "chown")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_LCHFLAGS", "chflags")
|
||||
_add("HAVE_LCHMOD", "chmod")
|
||||
if _exists("lchown"): # mac os x10.3
|
||||
_add("HAVE_LCHOWN", "chown")
|
||||
_add("HAVE_LINKAT", "link")
|
||||
_add("HAVE_LUTIMES", "utime")
|
||||
_add("HAVE_LSTAT", "stat")
|
||||
_add("HAVE_FSTATAT", "stat")
|
||||
_add("HAVE_UTIMENSAT", "utime")
|
||||
_add("MS_WINDOWS", "stat")
|
||||
supports_follow_symlinks = _set
|
||||
|
||||
del _set
|
||||
del _have_functions
|
||||
del _globals
|
||||
del _add
|
||||
del _set
|
||||
del _have_functions
|
||||
del _globals
|
||||
del _add
|
||||
|
||||
|
||||
# Python uses fixed values for the SEEK_ constants; they are mapped
|
||||
|
|
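The os.py hunks above are the largest instance of the pattern: `from posix import *` together with the runtime `__all__.extend(_get_exports_list(posix))` is replaced by an explicit `name = posix.name` binding for every re-exported symbol. Abridged sketch (three of the several hundred bindings):

# Before: os re-exports whatever the posix module happens to define.
from posix import *
__all__.extend(_get_exports_list(posix))

# After: each re-exported symbol is an explicit, statically visible binding.
import posix
environ = posix.environ
stat = posix.stat
open = posix.open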
third_party/python/Lib/pickle.py (vendored, 8 lines changed)
@@ -1576,8 +1576,12 @@ except ImportError:

# Doctest
def _test():
import doctest
return doctest.testmod()
import sys
try:
import doctest
except ImportError:
sys.exit(1)
print(doctest.testmod())

if __name__ == "__main__":
import argparse
third_party/python/Lib/pickletools.py (vendored, 8 lines changed)
@@ -2786,8 +2786,12 @@ __test__ = {'disassembler_test': _dis_test,
}

def _test():
import doctest
return doctest.testmod()
import sys
try:
import doctest
except ImportError:
sys.exit(1)
print(doctest.testmod())

if __name__ == "__main__":
import argparse
third_party/python/Lib/platform.py (vendored, 10 lines changed)
@@ -535,10 +535,10 @@ def win32_ver(release='', version='', csd='', ptype=''):
from sys import getwindowsversion
except ImportError:
return release, version, csd, ptype
try:
from winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE
except ImportError:
from _winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE
# try:
# from winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE
# except ImportError:
# from _winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE

winver = getwindowsversion()
maj, min, build = winver.platform_version or winver[:3]
@@ -621,7 +621,7 @@ def mac_ver(release='', versioninfo=('', '', ''), machine=''):

def _java_getprop(name, default):

from java.lang import System
# from java.lang import System
try:
value = System.getProperty(name)
if value is None:
third_party/python/Lib/re.py (vendored, 19 lines changed)
@@ -158,7 +158,24 @@ class RegexFlag(enum.IntFlag):
TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
T = TEMPLATE
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
globals().update(RegexFlag.__members__)

ASCII = RegexFlag.ASCII
IGNORECASE = RegexFlag.IGNORECASE
LOCALE = RegexFlag.LOCALE
UNICODE = RegexFlag.UNICODE
MULTILINE = RegexFlag.MULTILINE
DOTALL = RegexFlag.DOTALL
VERBOSE = RegexFlag.VERBOSE
A = RegexFlag.A
I = RegexFlag.I
L = RegexFlag.L
U = RegexFlag.U
M = RegexFlag.M
S = RegexFlag.S
X = RegexFlag.X
TEMPLATE = RegexFlag.TEMPLATE
T = RegexFlag.T
DEBUG = RegexFlag.DEBUG

# sre exception
error = sre_compile.error
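The re.py hunk above applies the same idea to enum members: instead of injecting the RegexFlag members into the module namespace through `globals()`, each flag becomes an ordinary module attribute. Abridged:

# Before: the flag names exist only after this call executes.
globals().update(RegexFlag.__members__)

# After: each flag is a plain module-level name.
IGNORECASE = RegexFlag.IGNORECASE
MULTILINE = RegexFlag.MULTILINE
DOTALL = RegexFlag.DOTALL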
third_party/python/Lib/shelve.py (vendored, 2 lines changed)
@@ -223,7 +223,7 @@ class DbfilenameShelf(Shelf):
"""

def __init__(self, filename, flag='c', protocol=None, writeback=False):
import dbm
# import dbm
Shelf.__init__(self, dbm.open(filename, flag), protocol, writeback)
third_party/python/Lib/site.py (vendored, 12 lines changed)
@@ -350,7 +350,8 @@ def setcopyright():
|
|||
builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright)
|
||||
builtins.credits = _sitebuiltins._Printer("credits", """\
|
||||
Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
|
||||
for supporting Python development. See www.python.org for more information.""")
|
||||
for supporting Python development. See www.python.org for more information.
|
||||
Thanks go to github.com/ahgamut for porting Python to Cosmopolitan Libc.""")
|
||||
files, dirs = [], []
|
||||
# Not all modules are required to have a __file__ attribute. See
|
||||
# PEP 420 for more details.
|
||||
|
@@ -377,8 +378,8 @@ def enablerlcompleter():
|
|||
or in a PYTHONSTARTUP file.
|
||||
"""
|
||||
def register_readline():
|
||||
import atexit
|
||||
try:
|
||||
import atexit
|
||||
import readline
|
||||
import rlcompleter
|
||||
except ImportError:
|
||||
|
@@ -592,8 +593,11 @@ def _script():
|
|||
else:
|
||||
sys.exit(3)
|
||||
else:
|
||||
import textwrap
|
||||
print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
|
||||
try:
|
||||
import textwrap
|
||||
print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
|
||||
except ImportError:
|
||||
pass
|
||||
sys.exit(10)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
third_party/python/Lib/smtpd.py (vendored, 6 lines changed)
@@ -785,9 +785,9 @@ class MailmanProxy(PureProxy):

def process_message(self, peer, mailfrom, rcpttos, data):
from io import StringIO
from Mailman import Utils
from Mailman import Message
from Mailman import MailList
# from Mailman import Utils
# from Mailman import Message
# from Mailman import MailList
# If the message is to a Mailman mailing list, then we'll invoke the
# Mailman script directly, without going through the real smtpd.
# Otherwise we'll forward it to the local proxy for disposition.
third_party/python/Lib/socket.py (vendored, 2 lines changed)
@@ -47,7 +47,7 @@ the setsockopt() and getsockopt() methods.
|
|||
"""
|
||||
|
||||
import _socket
|
||||
from _socket import *
|
||||
from _socket import AF_APPLETALK, AF_ASH, AF_ATMPVC, AF_ATMSVC, AF_AX25, AF_BRIDGE, AF_CAN, AF_ECONET, AF_INET, AF_INET6, AF_IPX, AF_IRDA, AF_KEY, AF_LLC, AF_NETBEUI, AF_NETROM, AF_PACKET, AF_PPPOX, AF_RDS, AF_ROSE, AF_ROUTE, AF_SECURITY, AF_SNA, AF_UNIX, AF_UNSPEC, AF_X25, AI_ADDRCONFIG, AI_ALL, AI_CANONNAME, AI_NUMERICHOST, AI_NUMERICSERV, AI_PASSIVE, AI_V4MAPPED, CAPI, EAI_ADDRFAMILY, EAI_AGAIN, EAI_BADFLAGS, EAI_FAIL, EAI_FAMILY, EAI_MEMORY, EAI_NODATA, EAI_NONAME, EAI_OVERFLOW, EAI_SERVICE, EAI_SOCKTYPE, EAI_SYSTEM, INADDR_ALLHOSTS_GROUP, INADDR_ANY, INADDR_BROADCAST, INADDR_LOOPBACK, INADDR_MAX_LOCAL_GROUP, INADDR_NONE, INADDR_UNSPEC_GROUP, IPPORT_RESERVED, IPPORT_USERRESERVED, IPPROTO_AH, IPPROTO_DSTOPTS, IPPROTO_EGP, IPPROTO_ESP, IPPROTO_FRAGMENT, IPPROTO_GRE, IPPROTO_HOPOPTS, IPPROTO_ICMP, IPPROTO_ICMPV6, IPPROTO_IDP, IPPROTO_IGMP, IPPROTO_IP, IPPROTO_IPIP, IPPROTO_IPV6, IPPROTO_MAX, IPPROTO_NONE, IPPROTO_PIM, IPPROTO_PUP, IPPROTO_RAW, IPPROTO_ROUTING, IPPROTO_RSVP, IPPROTO_SCTP, IPPROTO_TCP, IPPROTO_TP, IPPROTO_UDP, IP_ADD_MEMBERSHIP, IP_DEFAULT_MULTICAST_LOOP, IP_DEFAULT_MULTICAST_TTL, IP_DROP_MEMBERSHIP, IP_HDRINCL, IP_MAX_MEMBERSHIPS, IP_MULTICAST_IF, IP_MULTICAST_LOOP, IP_MULTICAST_TTL, IP_OPTIONS, IP_RECVOPTS, IP_RECVRETOPTS, IP_RETOPTS, IP_TOS, IP_TRANSPARENT, IP_TTL, MSG_CMSG_CLOEXEC, MSG_CONFIRM, MSG_CTRUNC, MSG_DONTROUTE, MSG_DONTWAIT, MSG_EOF, MSG_EOR, MSG_ERRQUEUE, MSG_FASTOPEN, MSG_MORE, MSG_NOSIGNAL, MSG_NOTIFICATION, MSG_OOB, MSG_PEEK, MSG_TRUNC, MSG_WAITALL, NI_DGRAM, NI_MAXHOST, NI_MAXSERV, NI_NAMEREQD, NI_NOFQDN, NI_NUMERICHOST, NI_NUMERICSERV, PF_CAN, PF_PACKET, PF_RDS, SHUT_RD, SHUT_RDWR, SHUT_WR, SOCK_CLOEXEC, SOCK_DGRAM, SOCK_NONBLOCK, SOCK_RAW, SOCK_RDM, SOCK_SEQPACKET, SOCK_STREAM, SOL_IP, SOL_RDS, SOL_SOCKET, SOL_TCP, SOL_UDP, SOMAXCONN, SO_ACCEPTCONN, SO_BINDTODEVICE, SO_BROADCAST, SO_DEBUG, SO_DOMAIN, SO_DONTROUTE, SO_ERROR, SO_KEEPALIVE, SO_LINGER, SO_MARK, SO_OOBINLINE, SO_PASSCRED, SO_PASSSEC, SO_PEERCRED, SO_PEERSEC, SO_PRIORITY, SO_PROTOCOL, SO_RCVBUF, SO_RCVLOWAT, SO_RCVTIMEO, SO_REUSEADDR, SO_REUSEPORT, SO_SNDBUF, SO_SNDLOWAT, SO_SNDTIMEO, SO_TYPE, SocketType, TCP_CONGESTION, TCP_CORK, TCP_DEFER_ACCEPT, TCP_FASTOPEN, TCP_FASTOPEN_CONNECT, TCP_INFO, TCP_KEEPCNT, TCP_KEEPIDLE, TCP_KEEPINTVL, TCP_LINGER2, TCP_MAXSEG, TCP_NODELAY, TCP_QUICKACK, TCP_SAVED_SYN, TCP_SAVE_SYN, TCP_SYNCNT, TCP_USER_TIMEOUT, TCP_WINDOW_CLAMP, dup, error, gaierror, getaddrinfo, getdefaulttimeout, gethostbyaddr, gethostbyname, gethostbyname_ex, gethostname, getnameinfo, getprotobyname, getservbyname, getservbyport, has_ipv6, herror, htonl, htons, inet_aton, inet_ntoa, inet_ntop, inet_pton, ntohl, ntohs, setdefaulttimeout, sethostname, socket, socketpair, timeout
|
||||
|
||||
import os, sys, io, selectors
|
||||
from enum import IntEnum, IntFlag
|
||||
|
|
third_party/python/Lib/sre_constants.py (vendored, 190 lines changed)
@@ -64,67 +64,145 @@ class _NamedIntConstant(int):
|
|||
|
||||
MAXREPEAT = _NamedIntConstant(MAXREPEAT, 'MAXREPEAT')
|
||||
|
||||
def _makecodes(names):
|
||||
names = names.strip().split()
|
||||
items = [_NamedIntConstant(i, name) for i, name in enumerate(names)]
|
||||
globals().update({item.name: item for item in items})
|
||||
return items
|
||||
FAILURE = _NamedIntConstant(0, 'FAILURE')
|
||||
SUCCESS = _NamedIntConstant(1, 'SUCCESS')
|
||||
ANY = _NamedIntConstant(2, 'ANY')
|
||||
ANY_ALL = _NamedIntConstant(3, 'ANY_ALL')
|
||||
ASSERT = _NamedIntConstant(4, 'ASSERT')
|
||||
ASSERT_NOT = _NamedIntConstant(5, 'ASSERT_NOT')
|
||||
AT = _NamedIntConstant(6, 'AT')
|
||||
BRANCH = _NamedIntConstant(7, 'BRANCH')
|
||||
CALL = _NamedIntConstant(8, 'CALL')
|
||||
CATEGORY = _NamedIntConstant(9, 'CATEGORY')
|
||||
CHARSET = _NamedIntConstant(10, 'CHARSET')
|
||||
BIGCHARSET = _NamedIntConstant(11, 'BIGCHARSET')
|
||||
GROUPREF = _NamedIntConstant(12, 'GROUPREF')
|
||||
GROUPREF_EXISTS = _NamedIntConstant(13, 'GROUPREF_EXISTS')
|
||||
GROUPREF_IGNORE = _NamedIntConstant(14, 'GROUPREF_IGNORE')
|
||||
IN = _NamedIntConstant(15, 'IN')
|
||||
IN_IGNORE = _NamedIntConstant(16, 'IN_IGNORE')
|
||||
INFO = _NamedIntConstant(17, 'INFO')
|
||||
JUMP = _NamedIntConstant(18, 'JUMP')
|
||||
LITERAL = _NamedIntConstant(19, 'LITERAL')
|
||||
LITERAL_IGNORE = _NamedIntConstant(20, 'LITERAL_IGNORE')
|
||||
MARK = _NamedIntConstant(21, 'MARK')
|
||||
MAX_UNTIL = _NamedIntConstant(22, 'MAX_UNTIL')
|
||||
MIN_UNTIL = _NamedIntConstant(23, 'MIN_UNTIL')
|
||||
NOT_LITERAL = _NamedIntConstant(24, 'NOT_LITERAL')
|
||||
NOT_LITERAL_IGNORE = _NamedIntConstant(25, 'NOT_LITERAL_IGNORE')
|
||||
NEGATE = _NamedIntConstant(26, 'NEGATE')
|
||||
RANGE = _NamedIntConstant(27, 'RANGE')
|
||||
REPEAT = _NamedIntConstant(28, 'REPEAT')
|
||||
REPEAT_ONE = _NamedIntConstant(29, 'REPEAT_ONE')
|
||||
SUBPATTERN = _NamedIntConstant(30, 'SUBPATTERN')
|
||||
MIN_REPEAT_ONE = _NamedIntConstant(31, 'MIN_REPEAT_ONE')
|
||||
RANGE_IGNORE = _NamedIntConstant(32, 'RANGE_IGNORE')
|
||||
MIN_REPEAT = _NamedIntConstant(33, 'MIN_REPEAT')
|
||||
MAX_REPEAT = _NamedIntConstant(34, 'MAX_REPEAT')
|
||||
|
||||
# operators
|
||||
# failure=0 success=1 (just because it looks better that way :-)
|
||||
OPCODES = _makecodes("""
|
||||
FAILURE SUCCESS
|
||||
OPCODES = [
|
||||
FAILURE,
|
||||
SUCCESS,
|
||||
ANY,
|
||||
ANY_ALL,
|
||||
ASSERT,
|
||||
ASSERT_NOT,
|
||||
AT,
|
||||
BRANCH,
|
||||
CALL,
|
||||
CATEGORY,
|
||||
CHARSET,
|
||||
BIGCHARSET,
|
||||
GROUPREF,
|
||||
GROUPREF_EXISTS,
|
||||
GROUPREF_IGNORE,
|
||||
IN,
|
||||
IN_IGNORE,
|
||||
INFO,
|
||||
JUMP,
|
||||
LITERAL,
|
||||
LITERAL_IGNORE,
|
||||
MARK,
|
||||
MAX_UNTIL,
|
||||
MIN_UNTIL,
|
||||
NOT_LITERAL,
|
||||
NOT_LITERAL_IGNORE,
|
||||
NEGATE,
|
||||
RANGE,
|
||||
REPEAT,
|
||||
REPEAT_ONE,
|
||||
SUBPATTERN,
|
||||
MIN_REPEAT_ONE,
|
||||
RANGE_IGNORE,
|
||||
]
|
||||
|
||||
ANY ANY_ALL
|
||||
ASSERT ASSERT_NOT
|
||||
AT
|
||||
BRANCH
|
||||
CALL
|
||||
CATEGORY
|
||||
CHARSET BIGCHARSET
|
||||
GROUPREF GROUPREF_EXISTS GROUPREF_IGNORE
|
||||
IN IN_IGNORE
|
||||
INFO
|
||||
JUMP
|
||||
LITERAL LITERAL_IGNORE
|
||||
MARK
|
||||
MAX_UNTIL
|
||||
MIN_UNTIL
|
||||
NOT_LITERAL NOT_LITERAL_IGNORE
|
||||
NEGATE
|
||||
RANGE
|
||||
REPEAT
|
||||
REPEAT_ONE
|
||||
SUBPATTERN
|
||||
MIN_REPEAT_ONE
|
||||
RANGE_IGNORE
|
||||
AT_BEGINNING = _NamedIntConstant( 0, 'AT_BEGINNING')
|
||||
AT_BEGINNING_LINE = _NamedIntConstant( 1, 'AT_BEGINNING_LINE')
|
||||
AT_BEGINNING_STRING = _NamedIntConstant( 2, 'AT_BEGINNING_STRING')
|
||||
AT_BOUNDARY = _NamedIntConstant( 3, 'AT_BOUNDARY')
|
||||
AT_NON_BOUNDARY = _NamedIntConstant( 4, 'AT_NON_BOUNDARY')
|
||||
AT_END = _NamedIntConstant( 5, 'AT_END')
|
||||
AT_END_LINE = _NamedIntConstant( 6, 'AT_END_LINE')
|
||||
AT_END_STRING = _NamedIntConstant( 7, 'AT_END_STRING')
|
||||
AT_LOC_BOUNDARY = _NamedIntConstant( 8, 'AT_LOC_BOUNDARY')
|
||||
AT_LOC_NON_BOUNDARY = _NamedIntConstant( 9, 'AT_LOC_NON_BOUNDARY')
|
||||
AT_UNI_BOUNDARY = _NamedIntConstant(10, 'AT_UNI_BOUNDARY')
|
||||
AT_UNI_NON_BOUNDARY = _NamedIntConstant(11, 'AT_UNI_NON_BOUNDARY')
|
||||
|
||||
MIN_REPEAT MAX_REPEAT
|
||||
""")
|
||||
del OPCODES[-2:] # remove MIN_REPEAT and MAX_REPEAT
|
||||
ATCODES = [
|
||||
AT_BEGINNING,
|
||||
AT_BEGINNING_LINE,
|
||||
AT_BEGINNING_STRING,
|
||||
AT_BOUNDARY,
|
||||
AT_NON_BOUNDARY,
|
||||
AT_END,
|
||||
AT_END_LINE,
|
||||
AT_END_STRING,
|
||||
AT_LOC_BOUNDARY,
|
||||
AT_LOC_NON_BOUNDARY,
|
||||
AT_UNI_BOUNDARY,
|
||||
AT_UNI_NON_BOUNDARY,
|
||||
]
|
||||
|
||||
# positions
|
||||
ATCODES = _makecodes("""
|
||||
AT_BEGINNING AT_BEGINNING_LINE AT_BEGINNING_STRING
|
||||
AT_BOUNDARY AT_NON_BOUNDARY
|
||||
AT_END AT_END_LINE AT_END_STRING
|
||||
AT_LOC_BOUNDARY AT_LOC_NON_BOUNDARY
|
||||
AT_UNI_BOUNDARY AT_UNI_NON_BOUNDARY
|
||||
""")
|
||||
|
||||
# categories
|
||||
CHCODES = _makecodes("""
|
||||
CATEGORY_DIGIT CATEGORY_NOT_DIGIT
|
||||
CATEGORY_SPACE CATEGORY_NOT_SPACE
|
||||
CATEGORY_WORD CATEGORY_NOT_WORD
|
||||
CATEGORY_LINEBREAK CATEGORY_NOT_LINEBREAK
|
||||
CATEGORY_LOC_WORD CATEGORY_LOC_NOT_WORD
|
||||
CATEGORY_UNI_DIGIT CATEGORY_UNI_NOT_DIGIT
|
||||
CATEGORY_UNI_SPACE CATEGORY_UNI_NOT_SPACE
|
||||
CATEGORY_UNI_WORD CATEGORY_UNI_NOT_WORD
|
||||
CATEGORY_UNI_LINEBREAK CATEGORY_UNI_NOT_LINEBREAK
|
||||
""")
|
||||
CATEGORY_DIGIT = _NamedIntConstant( 1, 'CATEGORY_DIGIT')
|
||||
CATEGORY_NOT_DIGIT = _NamedIntConstant( 2, 'CATEGORY_NOT_DIGIT')
|
||||
CATEGORY_SPACE = _NamedIntConstant( 3, 'CATEGORY_SPACE')
|
||||
CATEGORY_NOT_SPACE = _NamedIntConstant( 4, 'CATEGORY_NOT_SPACE')
|
||||
CATEGORY_WORD = _NamedIntConstant( 5, 'CATEGORY_WORD')
|
||||
CATEGORY_NOT_WORD = _NamedIntConstant( 6, 'CATEGORY_NOT_WORD')
|
||||
CATEGORY_LINEBREAK = _NamedIntConstant( 7, 'CATEGORY_LINEBREAK')
|
||||
CATEGORY_NOT_LINEBREAK = _NamedIntConstant( 8, 'CATEGORY_NOT_LINEBREAK')
|
||||
CATEGORY_LOC_WORD = _NamedIntConstant( 9, 'CATEGORY_LOC_WORD')
|
||||
CATEGORY_LOC_NOT_WORD = _NamedIntConstant(10, 'CATEGORY_LOC_NOT_WORD')
|
||||
CATEGORY_UNI_DIGIT = _NamedIntConstant(11, 'CATEGORY_UNI_DIGIT')
|
||||
CATEGORY_UNI_NOT_DIGIT = _NamedIntConstant(12, 'CATEGORY_UNI_NOT_DIGIT')
|
||||
CATEGORY_UNI_SPACE = _NamedIntConstant(13, 'CATEGORY_UNI_SPACE')
|
||||
CATEGORY_UNI_NOT_SPACE = _NamedIntConstant(14, 'CATEGORY_UNI_NOT_SPACE')
|
||||
CATEGORY_UNI_WORD = _NamedIntConstant(15, 'CATEGORY_UNI_WORD')
|
||||
CATEGORY_UNI_NOT_WORD = _NamedIntConstant(16, 'CATEGORY_UNI_NOT_WORD')
|
||||
CATEGORY_UNI_LINEBREAK = _NamedIntConstant(17, 'CATEGORY_UNI_LINEBREAK')
|
||||
CATEGORY_UNI_NOT_LINEBREAK = _NamedIntConstant(18, 'CATEGORY_UNI_NOT_LINEBREAK')
|
||||
|
||||
CHCODES = [
|
||||
CATEGORY_DIGIT,
|
||||
CATEGORY_NOT_DIGIT,
|
||||
CATEGORY_SPACE,
|
||||
CATEGORY_NOT_SPACE,
|
||||
CATEGORY_WORD,
|
||||
CATEGORY_NOT_WORD,
|
||||
CATEGORY_LINEBREAK,
|
||||
CATEGORY_NOT_LINEBREAK,
|
||||
CATEGORY_LOC_WORD,
|
||||
CATEGORY_LOC_NOT_WORD,
|
||||
CATEGORY_UNI_DIGIT,
|
||||
CATEGORY_UNI_NOT_DIGIT,
|
||||
CATEGORY_UNI_SPACE,
|
||||
CATEGORY_UNI_NOT_SPACE,
|
||||
CATEGORY_UNI_WORD,
|
||||
CATEGORY_UNI_NOT_WORD,
|
||||
CATEGORY_UNI_LINEBREAK,
|
||||
CATEGORY_UNI_NOT_LINEBREAK,
|
||||
]
|
||||
|
||||
# replacement operations for "ignore case" mode
|
||||
OP_IGNORE = {
|
||||
|
|
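The hand-expanded OPCODES table above replaces values that _makecodes derived with enumerate(), so the one invariant worth spot-checking is that each opcode's integer value still equals its position in OPCODES. The check below is an editor's sketch rather than part of the commit; it assumes the module is importable as sre_constants, as in this tree.
# Editor's sketch, not part of the commit: verify the explicit OPCODES
# table keeps the index == value property that _makecodes guaranteed.
import sre_constants

for i, op in enumerate(sre_constants.OPCODES):
    assert op == i, (i, op)          # value matches position
    assert str(op) == op.name        # _NamedIntConstant prints its name
print("OPCODES table is consistent:", len(sre_constants.OPCODES), "opcodes")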
2
third_party/python/Lib/struct.py
vendored
2
third_party/python/Lib/struct.py
vendored
@ -10,6 +10,6 @@ __all__ = [
'error'
]
from _struct import *
from _struct import Struct, calcsize, error, iter_unpack, pack, pack_into, unpack, unpack_from
from _struct import _clearcache
from _struct import __doc__
57
third_party/python/Lib/subprocess.py
vendored
57
third_party/python/Lib/subprocess.py
vendored
@ -161,38 +161,31 @@ __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
# NOTE: We intentionally exclude list2cmdline as it is
# considered an internal implementation detail. issue10838.
if _mswindows:
from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE, SW_HIDE,
STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
__all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
"STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
"STD_ERROR_HANDLE", "SW_HIDE",
"STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
"STARTUPINFO"])
class Handle(int):
closed = False
def Close(self, CloseHandle=_winapi.CloseHandle):
if not self.closed:
self.closed = True
CloseHandle(self)
def Detach(self):
if not self.closed:
self.closed = True
return int(self)
raise ValueError("already closed")
def __repr__(self):
return "%s(%d)" % (self.__class__.__name__, int(self))
__del__ = Close
__str__ = __repr__
# if _mswindows:
# from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
# STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
# STD_ERROR_HANDLE, SW_HIDE,
# STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
# __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
# "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
# "STD_ERROR_HANDLE", "SW_HIDE",
# "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
# "STARTUPINFO"])
# class Handle(int):
# closed = False
# def Close(self, CloseHandle=_winapi.CloseHandle):
# if not self.closed:
# self.closed = True
# CloseHandle(self)
# def Detach(self):
# if not self.closed:
# self.closed = True
# return int(self)
# raise ValueError("already closed")
# def __repr__(self):
# return "%s(%d)" % (self.__class__.__name__, int(self))
# __del__ = Close
# __str__ = __repr__
# This lists holds Popen instances for which the underlying process had not
# exited at the time its __del__ method got called: those processes are wait()ed
4
third_party/python/Lib/sysconfig.py
vendored
4
third_party/python/Lib/sysconfig.py
vendored
@ -577,7 +577,7 @@ def get_config_vars(*args):
# OS X platforms require special customization to handle
# multi-architecture, multi-os-version installers
if sys.platform == 'darwin':
import _osx_support
# import _osx_support
_osx_support.customize_config_vars(_CONFIG_VARS)
if args:
@ -684,7 +684,7 @@ def get_platform():
if m:
release = m.group()
elif osname[:6] == "darwin":
import _osx_support
# import _osx_support
osname, release, machine = _osx_support.get_platform_osx(
get_config_vars(),
osname, release, machine)
10
third_party/python/Lib/test/support/__init__.py
vendored
10
third_party/python/Lib/test/support/__init__.py
vendored
@ -471,8 +471,8 @@ def _is_gui_available():
if sys.platform.startswith('win'):
# if Python is running as a service (such as the buildbot service),
# gui interaction may be disallowed
import ctypes
import ctypes.wintypes
# import ctypes
# import ctypes.wintypes
UOI_FLAGS = 1
WSF_VISIBLE = 0x0001
class USEROBJECTFLAGS(ctypes.Structure):
@ -501,8 +501,8 @@ def _is_gui_available():
# process not running under the same user id as the current console
# user. To avoid that, raise an exception if the window manager
# connection is not available.
from ctypes import cdll, c_int, pointer, Structure
from ctypes.util import find_library
# from ctypes import cdll, c_int, pointer, Structure
# from ctypes.util import find_library
app_services = cdll.LoadLibrary(find_library("ApplicationServices"))
@ -2707,7 +2707,7 @@ def missing_compiler_executable(cmd_names=[]):
missing.
"""
from distutils import ccompiler, sysconfig, spawn
# from distutils import ccompiler, sysconfig, spawn
compiler = ccompiler.new_compiler()
sysconfig.customize_compiler(compiler)
for name in compiler.executables:
22
third_party/python/Lib/test/test_compile.py
vendored
22
third_party/python/Lib/test/test_compile.py
vendored
@ -415,18 +415,18 @@ if 1:
s %= ', '.join('a%d:%d' % (i,i) for i in range(255))
compile(s, '?', 'exec')
def test_mangling(self):
class A:
def f():
__mangled = 1
__not_mangled__ = 2
import __mangled_mod
import __package__.module
# def test_mangling(self):
# class A:
# def f():
# __mangled = 1
# __not_mangled__ = 2
# import __mangled_mod
# import __package__.module
self.assertIn("_A__mangled", A.f.__code__.co_varnames)
self.assertIn("__not_mangled__", A.f.__code__.co_varnames)
self.assertIn("_A__mangled_mod", A.f.__code__.co_varnames)
self.assertIn("__package__", A.f.__code__.co_varnames)
# self.assertIn("_A__mangled", A.f.__code__.co_varnames)
# self.assertIn("__not_mangled__", A.f.__code__.co_varnames)
# self.assertIn("_A__mangled_mod", A.f.__code__.co_varnames)
# self.assertIn("__package__", A.f.__code__.co_varnames)
def test_compile_ast(self):
fname = __file__
@ -5,7 +5,8 @@ import email
from email.message import Message
from email._policybase import compat32
from test.support import load_package_tests
from test.test_email import __file__ as landmark
landmark = __file__
# Load all tests in package
def load_tests(*args):
@ -42,6 +42,7 @@ from email import quoprimime
from test.support import unlink, start_threads
from test.test_email import openfile, TestEmailBase
from encodings import iso2022_jp
# These imports are documented to work, but we are testing them using a
# different path, so we import them here just to make sure they are importable.
310
third_party/python/Lib/test/test_ensurepip.py
vendored
310
third_party/python/Lib/test/test_ensurepip.py
vendored
|
@ -1,310 +0,0 @@
|
|||
import unittest
|
||||
import unittest.mock
|
||||
import test.support
|
||||
import os
|
||||
import os.path
|
||||
import contextlib
|
||||
import sys
|
||||
|
||||
import ensurepip
|
||||
import ensurepip._uninstall
|
||||
|
||||
|
||||
class TestEnsurePipVersion(unittest.TestCase):
|
||||
|
||||
def test_returns_version(self):
|
||||
self.assertEqual(ensurepip._PIP_VERSION, ensurepip.version())
|
||||
|
||||
class EnsurepipMixin:
|
||||
|
||||
def setUp(self):
|
||||
run_pip_patch = unittest.mock.patch("ensurepip._run_pip")
|
||||
self.run_pip = run_pip_patch.start()
|
||||
self.run_pip.return_value = 0
|
||||
self.addCleanup(run_pip_patch.stop)
|
||||
|
||||
# Avoid side effects on the actual os module
|
||||
real_devnull = os.devnull
|
||||
os_patch = unittest.mock.patch("ensurepip.os")
|
||||
patched_os = os_patch.start()
|
||||
self.addCleanup(os_patch.stop)
|
||||
patched_os.devnull = real_devnull
|
||||
patched_os.path = os.path
|
||||
self.os_environ = patched_os.environ = os.environ.copy()
|
||||
|
||||
|
||||
class TestBootstrap(EnsurepipMixin, unittest.TestCase):
|
||||
|
||||
def test_basic_bootstrapping(self):
|
||||
ensurepip.bootstrap()
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
additional_paths = self.run_pip.call_args[0][1]
|
||||
self.assertEqual(len(additional_paths), 2)
|
||||
|
||||
def test_bootstrapping_with_root(self):
|
||||
ensurepip.bootstrap(root="/foo/bar/")
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "--root", "/foo/bar/",
|
||||
"setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_user(self):
|
||||
ensurepip.bootstrap(user=True)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "--user", "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_upgrade(self):
|
||||
ensurepip.bootstrap(upgrade=True)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "--upgrade", "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_verbosity_1(self):
|
||||
ensurepip.bootstrap(verbosity=1)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "-v", "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_verbosity_2(self):
|
||||
ensurepip.bootstrap(verbosity=2)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "-vv", "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_verbosity_3(self):
|
||||
ensurepip.bootstrap(verbosity=3)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "-vvv", "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
def test_bootstrapping_with_regular_install(self):
|
||||
ensurepip.bootstrap()
|
||||
self.assertEqual(self.os_environ["ENSUREPIP_OPTIONS"], "install")
|
||||
|
||||
def test_bootstrapping_with_alt_install(self):
|
||||
ensurepip.bootstrap(altinstall=True)
|
||||
self.assertEqual(self.os_environ["ENSUREPIP_OPTIONS"], "altinstall")
|
||||
|
||||
def test_bootstrapping_with_default_pip(self):
|
||||
ensurepip.bootstrap(default_pip=True)
|
||||
self.assertNotIn("ENSUREPIP_OPTIONS", self.os_environ)
|
||||
|
||||
def test_altinstall_default_pip_conflict(self):
|
||||
with self.assertRaises(ValueError):
|
||||
ensurepip.bootstrap(altinstall=True, default_pip=True)
|
||||
self.assertFalse(self.run_pip.called)
|
||||
|
||||
def test_pip_environment_variables_removed(self):
|
||||
# ensurepip deliberately ignores all pip environment variables
|
||||
# See http://bugs.python.org/issue19734 for details
|
||||
self.os_environ["PIP_THIS_SHOULD_GO_AWAY"] = "test fodder"
|
||||
ensurepip.bootstrap()
|
||||
self.assertNotIn("PIP_THIS_SHOULD_GO_AWAY", self.os_environ)
|
||||
|
||||
def test_pip_config_file_disabled(self):
|
||||
# ensurepip deliberately ignores the pip config file
|
||||
# See http://bugs.python.org/issue20053 for details
|
||||
ensurepip.bootstrap()
|
||||
self.assertEqual(self.os_environ["PIP_CONFIG_FILE"], os.devnull)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def fake_pip(version=ensurepip._PIP_VERSION):
|
||||
if version is None:
|
||||
pip = None
|
||||
else:
|
||||
class FakePip():
|
||||
__version__ = version
|
||||
pip = FakePip()
|
||||
sentinel = object()
|
||||
orig_pip = sys.modules.get("pip", sentinel)
|
||||
sys.modules["pip"] = pip
|
||||
try:
|
||||
yield pip
|
||||
finally:
|
||||
if orig_pip is sentinel:
|
||||
del sys.modules["pip"]
|
||||
else:
|
||||
sys.modules["pip"] = orig_pip
|
||||
|
||||
class TestUninstall(EnsurepipMixin, unittest.TestCase):
|
||||
|
||||
def test_uninstall_skipped_when_not_installed(self):
|
||||
with fake_pip(None):
|
||||
ensurepip._uninstall_helper()
|
||||
self.assertFalse(self.run_pip.called)
|
||||
|
||||
def test_uninstall_skipped_with_warning_for_wrong_version(self):
|
||||
with fake_pip("not a valid version"):
|
||||
with test.support.captured_stderr() as stderr:
|
||||
ensurepip._uninstall_helper()
|
||||
warning = stderr.getvalue().strip()
|
||||
self.assertIn("only uninstall a matching version", warning)
|
||||
self.assertFalse(self.run_pip.called)
|
||||
|
||||
|
||||
def test_uninstall(self):
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper()
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"uninstall", "-y", "--disable-pip-version-check", "pip",
|
||||
"setuptools",
|
||||
]
|
||||
)
|
||||
|
||||
def test_uninstall_with_verbosity_1(self):
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper(verbosity=1)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"uninstall", "-y", "--disable-pip-version-check", "-v", "pip",
|
||||
"setuptools",
|
||||
]
|
||||
)
|
||||
|
||||
def test_uninstall_with_verbosity_2(self):
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper(verbosity=2)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"uninstall", "-y", "--disable-pip-version-check", "-vv", "pip",
|
||||
"setuptools",
|
||||
]
|
||||
)
|
||||
|
||||
def test_uninstall_with_verbosity_3(self):
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper(verbosity=3)
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"uninstall", "-y", "--disable-pip-version-check", "-vvv",
|
||||
"pip", "setuptools",
|
||||
]
|
||||
)
|
||||
|
||||
def test_pip_environment_variables_removed(self):
|
||||
# ensurepip deliberately ignores all pip environment variables
|
||||
# See http://bugs.python.org/issue19734 for details
|
||||
self.os_environ["PIP_THIS_SHOULD_GO_AWAY"] = "test fodder"
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper()
|
||||
self.assertNotIn("PIP_THIS_SHOULD_GO_AWAY", self.os_environ)
|
||||
|
||||
def test_pip_config_file_disabled(self):
|
||||
# ensurepip deliberately ignores the pip config file
|
||||
# See http://bugs.python.org/issue20053 for details
|
||||
with fake_pip():
|
||||
ensurepip._uninstall_helper()
|
||||
self.assertEqual(self.os_environ["PIP_CONFIG_FILE"], os.devnull)
|
||||
|
||||
|
||||
# Basic testing of the main functions and their argument parsing
|
||||
|
||||
EXPECTED_VERSION_OUTPUT = "pip " + ensurepip._PIP_VERSION
|
||||
|
||||
class TestBootstrappingMainFunction(EnsurepipMixin, unittest.TestCase):
|
||||
|
||||
def test_bootstrap_version(self):
|
||||
with test.support.captured_stdout() as stdout:
|
||||
with self.assertRaises(SystemExit):
|
||||
ensurepip._main(["--version"])
|
||||
result = stdout.getvalue().strip()
|
||||
self.assertEqual(result, EXPECTED_VERSION_OUTPUT)
|
||||
self.assertFalse(self.run_pip.called)
|
||||
|
||||
def test_basic_bootstrapping(self):
|
||||
exit_code = ensurepip._main([])
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"install", "--no-index", "--find-links",
|
||||
unittest.mock.ANY, "setuptools", "pip",
|
||||
],
|
||||
unittest.mock.ANY,
|
||||
)
|
||||
|
||||
additional_paths = self.run_pip.call_args[0][1]
|
||||
self.assertEqual(len(additional_paths), 2)
|
||||
self.assertEqual(exit_code, 0)
|
||||
|
||||
def test_bootstrapping_error_code(self):
|
||||
self.run_pip.return_value = 2
|
||||
exit_code = ensurepip._main([])
|
||||
self.assertEqual(exit_code, 2)
|
||||
|
||||
|
||||
class TestUninstallationMainFunction(EnsurepipMixin, unittest.TestCase):
|
||||
|
||||
def test_uninstall_version(self):
|
||||
with test.support.captured_stdout() as stdout:
|
||||
with self.assertRaises(SystemExit):
|
||||
ensurepip._uninstall._main(["--version"])
|
||||
result = stdout.getvalue().strip()
|
||||
self.assertEqual(result, EXPECTED_VERSION_OUTPUT)
|
||||
self.assertFalse(self.run_pip.called)
|
||||
|
||||
def test_basic_uninstall(self):
|
||||
with fake_pip():
|
||||
exit_code = ensurepip._uninstall._main([])
|
||||
|
||||
self.run_pip.assert_called_once_with(
|
||||
[
|
||||
"uninstall", "-y", "--disable-pip-version-check", "pip",
|
||||
"setuptools",
|
||||
]
|
||||
)
|
||||
|
||||
self.assertEqual(exit_code, 0)
|
||||
|
||||
def test_uninstall_error_code(self):
|
||||
with fake_pip():
|
||||
self.run_pip.return_value = 2
|
||||
exit_code = ensurepip._uninstall._main([])
|
||||
self.assertEqual(exit_code, 2)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
64
third_party/python/Lib/test/test_future.py
vendored
64
third_party/python/Lib/test/test_future.py
vendored
|
@ -33,45 +33,45 @@ class FutureTest(unittest.TestCase):
|
|||
with support.CleanImport('test_future3'):
|
||||
from test import test_future3
|
||||
|
||||
def test_badfuture3(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future3
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future3", 3)
|
||||
# def test_badfuture3(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future3
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future3", 3)
|
||||
|
||||
def test_badfuture4(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future4
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future4", 3)
|
||||
# def test_badfuture4(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future4
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future4", 3)
|
||||
|
||||
def test_badfuture5(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future5
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future5", 4)
|
||||
# def test_badfuture5(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future5
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future5", 4)
|
||||
|
||||
def test_badfuture6(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future6
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future6", 3)
|
||||
# def test_badfuture6(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future6
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future6", 3)
|
||||
|
||||
def test_badfuture7(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future7
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future7", 3, 53)
|
||||
# def test_badfuture7(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future7
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future7", 3, 53)
|
||||
|
||||
def test_badfuture8(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future8
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future8", 3)
|
||||
# def test_badfuture8(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future8
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future8", 3)
|
||||
|
||||
def test_badfuture9(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future9
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future9", 3, 0)
|
||||
# def test_badfuture9(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future9
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future9", 3, 0)
|
||||
|
||||
def test_badfuture10(self):
|
||||
with self.assertRaises(SyntaxError) as cm:
|
||||
from test import badsyntax_future10
|
||||
self.check_syntax_error(cm.exception, "badsyntax_future10", 3, 0)
|
||||
# def test_badfuture10(self):
|
||||
# with self.assertRaises(SyntaxError) as cm:
|
||||
# from test import badsyntax_future10
|
||||
# self.check_syntax_error(cm.exception, "badsyntax_future10", 3, 0)
|
||||
|
||||
def test_parserhack(self):
|
||||
# test that the parser.c::future_hack function works as expected
|
||||
|
|
16
third_party/python/Lib/test/test_httplib.py
vendored
16
third_party/python/Lib/test/test_httplib.py
vendored
|
@ -1623,7 +1623,7 @@ class HTTPSTest(TestCase):
|
|||
self.skipTest('ssl support required')
|
||||
|
||||
def make_server(self, certfile):
|
||||
from test.ssl_servers import make_https_server
|
||||
# from test.ssl_servers import make_https_server
|
||||
return make_https_server(self, certfile=certfile)
|
||||
|
||||
def test_attributes(self):
|
||||
|
@ -1633,7 +1633,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_networked(self):
|
||||
# Default settings: requires a valid cert from a trusted CA
|
||||
import ssl
|
||||
# import ssl
|
||||
support.requires('network')
|
||||
with support.transient_internet('self-signed.pythontest.net'):
|
||||
h = client.HTTPSConnection('self-signed.pythontest.net', 443)
|
||||
|
@ -1643,7 +1643,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_networked_noverification(self):
|
||||
# Switch off cert verification
|
||||
import ssl
|
||||
# import ssl
|
||||
support.requires('network')
|
||||
with support.transient_internet('self-signed.pythontest.net'):
|
||||
context = ssl._create_unverified_context()
|
||||
|
@ -1670,7 +1670,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_networked_good_cert(self):
|
||||
# We feed the server's cert as a validating cert
|
||||
import ssl
|
||||
# import ssl
|
||||
support.requires('network')
|
||||
with support.transient_internet('self-signed.pythontest.net'):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
|
@ -1686,7 +1686,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_networked_bad_cert(self):
|
||||
# We feed a "CA" cert that is unrelated to the server's cert
|
||||
import ssl
|
||||
# import ssl
|
||||
support.requires('network')
|
||||
with support.transient_internet('self-signed.pythontest.net'):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
|
@ -1699,7 +1699,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_local_unknown_cert(self):
|
||||
# The custom cert isn't known to the default trust bundle
|
||||
import ssl
|
||||
# import ssl
|
||||
server = self.make_server(CERT_localhost)
|
||||
h = client.HTTPSConnection('localhost', server.port)
|
||||
with self.assertRaises(ssl.SSLError) as exc_info:
|
||||
|
@ -1708,7 +1708,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_local_good_hostname(self):
|
||||
# The (valid) cert validates the HTTP hostname
|
||||
import ssl
|
||||
# import ssl
|
||||
server = self.make_server(CERT_localhost)
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
context.verify_mode = ssl.CERT_REQUIRED
|
||||
|
@ -1722,7 +1722,7 @@ class HTTPSTest(TestCase):
|
|||
|
||||
def test_local_bad_hostname(self):
|
||||
# The (valid) cert doesn't validate the HTTP hostname
|
||||
import ssl
|
||||
# import ssl
|
||||
server = self.make_server(CERT_fakehostname)
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
context.verify_mode = ssl.CERT_REQUIRED
|
||||
|
|
@ -7,6 +7,7 @@ from test import support
from test.support import TESTFN
import unittest, io, codecs, sys
import _multibytecodec
from encodings import iso2022_jp
ALL_CJKENCODINGS = [
# _codecs_cn
|
|
357
third_party/python/Lib/test/test_os.py
vendored
357
third_party/python/Lib/test/test_os.py
vendored
|
@ -637,7 +637,7 @@ class UtimeTests(unittest.TestCase):
|
|||
def get_file_system(self, path):
|
||||
if sys.platform == 'win32':
|
||||
root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
|
||||
import ctypes
|
||||
# import ctypes
|
||||
kernel32 = ctypes.windll.kernel32
|
||||
buf = ctypes.create_unicode_buffer("", 100)
|
||||
ok = kernel32.GetVolumeInformationW(root, None, 0,
|
||||
|
@ -1915,361 +1915,6 @@ class Pep383Tests(unittest.TestCase):
|
|||
for fn in self.unicodefn:
|
||||
os.stat(os.path.join(self.dir, fn))
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
|
||||
class Win32KillTests(unittest.TestCase):
|
||||
def _kill(self, sig):
|
||||
# Start sys.executable as a subprocess and communicate from the
|
||||
# subprocess to the parent that the interpreter is ready. When it
|
||||
# becomes ready, send *sig* via os.kill to the subprocess and check
|
||||
# that the return code is equal to *sig*.
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
import msvcrt
|
||||
|
||||
# Since we can't access the contents of the process' stdout until the
|
||||
# process has exited, use PeekNamedPipe to see what's inside stdout
|
||||
# without waiting. This is done so we can tell that the interpreter
|
||||
# is started and running at a point where it could handle a signal.
|
||||
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
|
||||
PeekNamedPipe.restype = wintypes.BOOL
|
||||
PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
|
||||
ctypes.POINTER(ctypes.c_char), # stdout buf
|
||||
wintypes.DWORD, # Buffer size
|
||||
ctypes.POINTER(wintypes.DWORD), # bytes read
|
||||
ctypes.POINTER(wintypes.DWORD), # bytes avail
|
||||
ctypes.POINTER(wintypes.DWORD)) # bytes left
|
||||
msg = "running"
|
||||
proc = subprocess.Popen([sys.executable, "-c",
|
||||
"import sys;"
|
||||
"sys.stdout.write('{}');"
|
||||
"sys.stdout.flush();"
|
||||
"input()".format(msg)],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE)
|
||||
self.addCleanup(proc.stdout.close)
|
||||
self.addCleanup(proc.stderr.close)
|
||||
self.addCleanup(proc.stdin.close)
|
||||
|
||||
count, max = 0, 100
|
||||
while count < max and proc.poll() is None:
|
||||
# Create a string buffer to store the result of stdout from the pipe
|
||||
buf = ctypes.create_string_buffer(len(msg))
|
||||
# Obtain the text currently in proc.stdout
|
||||
# Bytes read/avail/left are left as NULL and unused
|
||||
rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
|
||||
buf, ctypes.sizeof(buf), None, None, None)
|
||||
self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
|
||||
if buf.value:
|
||||
self.assertEqual(msg, buf.value.decode())
|
||||
break
|
||||
time.sleep(0.1)
|
||||
count += 1
|
||||
else:
|
||||
self.fail("Did not receive communication from the subprocess")
|
||||
|
||||
os.kill(proc.pid, sig)
|
||||
self.assertEqual(proc.wait(), sig)
|
||||
|
||||
def test_kill_sigterm(self):
|
||||
# SIGTERM doesn't mean anything special, but make sure it works
|
||||
self._kill(signal.SIGTERM)
|
||||
|
||||
def test_kill_int(self):
|
||||
# os.kill on Windows can take an int which gets set as the exit code
|
||||
self._kill(100)
|
||||
|
||||
def _kill_with_event(self, event, name):
|
||||
tagname = "test_os_%s" % uuid.uuid1()
|
||||
m = mmap.mmap(-1, 1, tagname)
|
||||
m[0] = 0
|
||||
# Run a script which has console control handling enabled.
|
||||
proc = subprocess.Popen([sys.executable,
|
||||
os.path.join(os.path.dirname(__file__),
|
||||
"win_console_handler.py"), tagname],
|
||||
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
|
||||
# Let the interpreter startup before we send signals. See #3137.
|
||||
count, max = 0, 100
|
||||
while count < max and proc.poll() is None:
|
||||
if m[0] == 1:
|
||||
break
|
||||
time.sleep(0.1)
|
||||
count += 1
|
||||
else:
|
||||
# Forcefully kill the process if we weren't able to signal it.
|
||||
os.kill(proc.pid, signal.SIGINT)
|
||||
self.fail("Subprocess didn't finish initialization")
|
||||
os.kill(proc.pid, event)
|
||||
# proc.send_signal(event) could also be done here.
|
||||
# Allow time for the signal to be passed and the process to exit.
|
||||
time.sleep(0.5)
|
||||
if not proc.poll():
|
||||
# Forcefully kill the process if we weren't able to signal it.
|
||||
os.kill(proc.pid, signal.SIGINT)
|
||||
self.fail("subprocess did not stop on {}".format(name))
|
||||
|
||||
@unittest.skip("subprocesses aren't inheriting Ctrl+C property")
|
||||
def test_CTRL_C_EVENT(self):
|
||||
from ctypes import wintypes
|
||||
import ctypes
|
||||
|
||||
# Make a NULL value by creating a pointer with no argument.
|
||||
NULL = ctypes.POINTER(ctypes.c_int)()
|
||||
SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
|
||||
SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
|
||||
wintypes.BOOL)
|
||||
SetConsoleCtrlHandler.restype = wintypes.BOOL
|
||||
|
||||
# Calling this with NULL and FALSE causes the calling process to
|
||||
# handle Ctrl+C, rather than ignore it. This property is inherited
|
||||
# by subprocesses.
|
||||
SetConsoleCtrlHandler(NULL, 0)
|
||||
|
||||
self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
|
||||
|
||||
def test_CTRL_BREAK_EVENT(self):
|
||||
self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
|
||||
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
|
||||
class Win32ListdirTests(unittest.TestCase):
|
||||
"""Test listdir on Windows."""
|
||||
|
||||
def setUp(self):
|
||||
self.created_paths = []
|
||||
for i in range(2):
|
||||
dir_name = 'SUB%d' % i
|
||||
dir_path = os.path.join(support.TESTFN, dir_name)
|
||||
file_name = 'FILE%d' % i
|
||||
file_path = os.path.join(support.TESTFN, file_name)
|
||||
os.makedirs(dir_path)
|
||||
with open(file_path, 'w') as f:
|
||||
f.write("I'm %s and proud of it. Blame test_os.\n" % file_path)
|
||||
self.created_paths.extend([dir_name, file_name])
|
||||
self.created_paths.sort()
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(support.TESTFN)
|
||||
|
||||
def test_listdir_no_extended_path(self):
|
||||
"""Test when the path is not an "extended" path."""
|
||||
# unicode
|
||||
self.assertEqual(
|
||||
sorted(os.listdir(support.TESTFN)),
|
||||
self.created_paths)
|
||||
|
||||
# bytes
|
||||
self.assertEqual(
|
||||
sorted(os.listdir(os.fsencode(support.TESTFN))),
|
||||
[os.fsencode(path) for path in self.created_paths])
|
||||
|
||||
def test_listdir_extended_path(self):
|
||||
"""Test when the path starts with '\\\\?\\'."""
|
||||
# See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath
|
||||
# unicode
|
||||
path = '\\\\?\\' + os.path.abspath(support.TESTFN)
|
||||
self.assertEqual(
|
||||
sorted(os.listdir(path)),
|
||||
self.created_paths)
|
||||
|
||||
# bytes
|
||||
path = b'\\\\?\\' + os.fsencode(os.path.abspath(support.TESTFN))
|
||||
self.assertEqual(
|
||||
sorted(os.listdir(path)),
|
||||
[os.fsencode(path) for path in self.created_paths])
|
||||
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
|
||||
@support.skip_unless_symlink
|
||||
class Win32SymlinkTests(unittest.TestCase):
|
||||
filelink = 'filelinktest'
|
||||
filelink_target = os.path.abspath(__file__)
|
||||
dirlink = 'dirlinktest'
|
||||
dirlink_target = os.path.dirname(filelink_target)
|
||||
missing_link = 'missing link'
|
||||
|
||||
def setUp(self):
|
||||
assert os.path.exists(self.dirlink_target)
|
||||
assert os.path.exists(self.filelink_target)
|
||||
assert not os.path.exists(self.dirlink)
|
||||
assert not os.path.exists(self.filelink)
|
||||
assert not os.path.exists(self.missing_link)
|
||||
|
||||
def tearDown(self):
|
||||
if os.path.exists(self.filelink):
|
||||
os.remove(self.filelink)
|
||||
if os.path.exists(self.dirlink):
|
||||
os.rmdir(self.dirlink)
|
||||
if os.path.lexists(self.missing_link):
|
||||
os.remove(self.missing_link)
|
||||
|
||||
def test_directory_link(self):
|
||||
os.symlink(self.dirlink_target, self.dirlink)
|
||||
self.assertTrue(os.path.exists(self.dirlink))
|
||||
self.assertTrue(os.path.isdir(self.dirlink))
|
||||
self.assertTrue(os.path.islink(self.dirlink))
|
||||
self.check_stat(self.dirlink, self.dirlink_target)
|
||||
|
||||
def test_file_link(self):
|
||||
os.symlink(self.filelink_target, self.filelink)
|
||||
self.assertTrue(os.path.exists(self.filelink))
|
||||
self.assertTrue(os.path.isfile(self.filelink))
|
||||
self.assertTrue(os.path.islink(self.filelink))
|
||||
self.check_stat(self.filelink, self.filelink_target)
|
||||
|
||||
def _create_missing_dir_link(self):
|
||||
'Create a "directory" link to a non-existent target'
|
||||
linkname = self.missing_link
|
||||
if os.path.lexists(linkname):
|
||||
os.remove(linkname)
|
||||
target = r'c:\\target does not exist.29r3c740'
|
||||
assert not os.path.exists(target)
|
||||
target_is_dir = True
|
||||
os.symlink(target, linkname, target_is_dir)
|
||||
|
||||
def test_remove_directory_link_to_missing_target(self):
|
||||
self._create_missing_dir_link()
|
||||
# For compatibility with Unix, os.remove will check the
|
||||
# directory status and call RemoveDirectory if the symlink
|
||||
# was created with target_is_dir==True.
|
||||
os.remove(self.missing_link)
|
||||
|
||||
@unittest.skip("currently fails; consider for improvement")
|
||||
def test_isdir_on_directory_link_to_missing_target(self):
|
||||
self._create_missing_dir_link()
|
||||
# consider having isdir return true for directory links
|
||||
self.assertTrue(os.path.isdir(self.missing_link))
|
||||
|
||||
@unittest.skip("currently fails; consider for improvement")
|
||||
def test_rmdir_on_directory_link_to_missing_target(self):
|
||||
self._create_missing_dir_link()
|
||||
# consider allowing rmdir to remove directory links
|
||||
os.rmdir(self.missing_link)
|
||||
|
||||
def check_stat(self, link, target):
|
||||
self.assertEqual(os.stat(link), os.stat(target))
|
||||
self.assertNotEqual(os.lstat(link), os.stat(link))
|
||||
|
||||
bytes_link = os.fsencode(link)
|
||||
self.assertEqual(os.stat(bytes_link), os.stat(target))
|
||||
self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
|
||||
|
||||
def test_12084(self):
|
||||
level1 = os.path.abspath(support.TESTFN)
|
||||
level2 = os.path.join(level1, "level2")
|
||||
level3 = os.path.join(level2, "level3")
|
||||
self.addCleanup(support.rmtree, level1)
|
||||
|
||||
os.mkdir(level1)
|
||||
os.mkdir(level2)
|
||||
os.mkdir(level3)
|
||||
|
||||
file1 = os.path.abspath(os.path.join(level1, "file1"))
|
||||
create_file(file1)
|
||||
|
||||
orig_dir = os.getcwd()
|
||||
try:
|
||||
os.chdir(level2)
|
||||
link = os.path.join(level2, "link")
|
||||
os.symlink(os.path.relpath(file1), "link")
|
||||
self.assertIn("link", os.listdir(os.getcwd()))
|
||||
|
||||
# Check os.stat calls from the same dir as the link
|
||||
self.assertEqual(os.stat(file1), os.stat("link"))
|
||||
|
||||
# Check os.stat calls from a dir below the link
|
||||
os.chdir(level1)
|
||||
self.assertEqual(os.stat(file1),
|
||||
os.stat(os.path.relpath(link)))
|
||||
|
||||
# Check os.stat calls from a dir above the link
|
||||
os.chdir(level3)
|
||||
self.assertEqual(os.stat(file1),
|
||||
os.stat(os.path.relpath(link)))
|
||||
finally:
|
||||
os.chdir(orig_dir)
|
||||
|
||||
@unittest.skipUnless(os.path.lexists(r'C:\Users\All Users')
|
||||
and os.path.exists(r'C:\ProgramData'),
|
||||
'Test directories not found')
|
||||
def test_29248(self):
|
||||
# os.symlink() calls CreateSymbolicLink, which creates
|
||||
# the reparse data buffer with the print name stored
|
||||
# first, so the offset is always 0. CreateSymbolicLink
|
||||
# stores the "PrintName" DOS path (e.g. "C:\") first,
|
||||
# with an offset of 0, followed by the "SubstituteName"
|
||||
# NT path (e.g. "\??\C:\"). The "All Users" link, on
|
||||
# the other hand, seems to have been created manually
|
||||
# with an inverted order.
|
||||
target = os.readlink(r'C:\Users\All Users')
|
||||
self.assertTrue(os.path.samefile(target, r'C:\ProgramData'))
|
||||
|
||||
def test_buffer_overflow(self):
|
||||
# Older versions would have a buffer overflow when detecting
|
||||
# whether a link source was a directory. This test ensures we
|
||||
# no longer crash, but does not otherwise validate the behavior
|
||||
segment = 'X' * 27
|
||||
path = os.path.join(*[segment] * 10)
|
||||
test_cases = [
|
||||
# overflow with absolute src
|
||||
('\\' + path, segment),
|
||||
# overflow dest with relative src
|
||||
(segment, path),
|
||||
# overflow when joining src
|
||||
(path[:180], path[:180]),
|
||||
]
|
||||
for src, dest in test_cases:
|
||||
try:
|
||||
os.symlink(src, dest)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
os.remove(dest)
|
||||
except OSError:
|
||||
pass
|
||||
# Also test with bytes, since that is a separate code path.
|
||||
try:
|
||||
os.symlink(os.fsencode(src), os.fsencode(dest))
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
os.remove(dest)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
|
||||
class Win32JunctionTests(unittest.TestCase):
|
||||
junction = 'junctiontest'
|
||||
junction_target = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
def setUp(self):
|
||||
assert os.path.exists(self.junction_target)
|
||||
assert not os.path.exists(self.junction)
|
||||
|
||||
def tearDown(self):
|
||||
if os.path.exists(self.junction):
|
||||
# os.rmdir delegates to Windows' RemoveDirectoryW,
|
||||
# which removes junction points safely.
|
||||
os.rmdir(self.junction)
|
||||
|
||||
def test_create_junction(self):
|
||||
_winapi.CreateJunction(self.junction_target, self.junction)
|
||||
self.assertTrue(os.path.exists(self.junction))
|
||||
self.assertTrue(os.path.isdir(self.junction))
|
||||
|
||||
# Junctions are not recognized as links.
|
||||
self.assertFalse(os.path.islink(self.junction))
|
||||
|
||||
def test_unlink_removes_junction(self):
|
||||
_winapi.CreateJunction(self.junction_target, self.junction)
|
||||
self.assertTrue(os.path.exists(self.junction))
|
||||
|
||||
os.unlink(self.junction)
|
||||
self.assertFalse(os.path.exists(self.junction))
|
||||
|
||||
|
||||
@support.skip_unless_symlink
|
||||
class NonLocalSymlinkTests(unittest.TestCase):
|
||||
|
|
42
third_party/python/Lib/test/test_pydoc.py
vendored
42
third_party/python/Lib/test/test_pydoc.py
vendored
|
@ -831,28 +831,28 @@ class PydocImportTest(PydocBaseTest):
|
|||
finally:
|
||||
os.chmod(pkgdir, current_mode)
|
||||
|
||||
def test_url_search_package_error(self):
|
||||
# URL handler search should cope with packages that raise exceptions
|
||||
pkgdir = os.path.join(TESTFN, "test_error_package")
|
||||
os.mkdir(pkgdir)
|
||||
init = os.path.join(pkgdir, "__init__.py")
|
||||
with open(init, "wt", encoding="ascii") as f:
|
||||
f.write("""raise ValueError("ouch")\n""")
|
||||
with self.restrict_walk_packages(path=[TESTFN]):
|
||||
# Package has to be importable for the error to have any effect
|
||||
saved_paths = tuple(sys.path)
|
||||
sys.path.insert(0, TESTFN)
|
||||
try:
|
||||
with self.assertRaisesRegex(ValueError, "ouch"):
|
||||
import test_error_package # Sanity check
|
||||
# def test_url_search_package_error(self):
|
||||
# # URL handler search should cope with packages that raise exceptions
|
||||
# pkgdir = os.path.join(TESTFN, "test_error_package")
|
||||
# os.mkdir(pkgdir)
|
||||
# init = os.path.join(pkgdir, "__init__.py")
|
||||
# with open(init, "wt", encoding="ascii") as f:
|
||||
# f.write("""raise ValueError("ouch")\n""")
|
||||
# with self.restrict_walk_packages(path=[TESTFN]):
|
||||
# # Package has to be importable for the error to have any effect
|
||||
# saved_paths = tuple(sys.path)
|
||||
# sys.path.insert(0, TESTFN)
|
||||
# try:
|
||||
# with self.assertRaisesRegex(ValueError, "ouch"):
|
||||
# import test_error_package # Sanity check
|
||||
|
||||
text = self.call_url_handler("search?key=test_error_package",
|
||||
"Pydoc: Search Results")
|
||||
found = ('<a href="test_error_package.html">'
|
||||
'test_error_package</a>')
|
||||
self.assertIn(found, text)
|
||||
finally:
|
||||
sys.path[:] = saved_paths
|
||||
# text = self.call_url_handler("search?key=test_error_package",
|
||||
# "Pydoc: Search Results")
|
||||
# found = ('<a href="test_error_package.html">'
|
||||
# 'test_error_package</a>')
|
||||
# self.assertIn(found, text)
|
||||
# finally:
|
||||
# sys.path[:] = saved_paths
|
||||
|
||||
@unittest.skip('causes undesirable side-effects (#20128)')
|
||||
def test_modules(self):
|
||||
|
|
128
third_party/python/Lib/test/test_reprlib.py
vendored
128
third_party/python/Lib/test/test_reprlib.py
vendored
|
@ -273,75 +273,75 @@ class LongReprTest(unittest.TestCase):
|
|||
elif os.name == 'nt' and verbose:
|
||||
print("cached_path_len =", cached_path_len)
|
||||
|
||||
def test_module(self):
|
||||
self.maxDiff = None
|
||||
self._check_path_limitations(self.pkgname)
|
||||
create_empty_file(os.path.join(self.subpkgname, self.pkgname + '.py'))
|
||||
importlib.invalidate_caches()
|
||||
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
|
||||
module = areallylongpackageandmodulenametotestreprtruncation
|
||||
self.assertEqual(repr(module), "<module %r from %r>" % (module.__name__, module.__file__))
|
||||
self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
|
||||
# def test_module(self):
|
||||
# self.maxDiff = None
|
||||
# self._check_path_limitations(self.pkgname)
|
||||
# create_empty_file(os.path.join(self.subpkgname, self.pkgname + '.py'))
|
||||
# importlib.invalidate_caches()
|
||||
# from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
|
||||
# module = areallylongpackageandmodulenametotestreprtruncation
|
||||
# self.assertEqual(repr(module), "<module %r from %r>" % (module.__name__, module.__file__))
|
||||
# self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
|
||||
|
||||
def test_type(self):
|
||||
self._check_path_limitations('foo')
|
||||
eq = self.assertEqual
|
||||
write_file(os.path.join(self.subpkgname, 'foo.py'), '''\
|
||||
class foo(object):
|
||||
pass
|
||||
''')
|
||||
importlib.invalidate_caches()
|
||||
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import foo
|
||||
eq(repr(foo.foo),
|
||||
"<class '%s.foo'>" % foo.__name__)
|
||||
# def test_type(self):
|
||||
# self._check_path_limitations('foo')
|
||||
# eq = self.assertEqual
|
||||
# write_file(os.path.join(self.subpkgname, 'foo.py'), '''\
|
||||
# class foo(object):
|
||||
# pass
|
||||
# ''')
|
||||
# importlib.invalidate_caches()
|
||||
# from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import foo
|
||||
# eq(repr(foo.foo),
|
||||
# "<class '%s.foo'>" % foo.__name__)
|
||||
|
||||
@unittest.skip('need a suitable object')
|
||||
def test_object(self):
|
||||
# XXX Test the repr of a type with a really long tp_name but with no
|
||||
# tp_repr. WIBNI we had ::Inline? :)
|
||||
pass
|
||||
# @unittest.skip('need a suitable object')
|
||||
# def test_object(self):
|
||||
# # XXX Test the repr of a type with a really long tp_name but with no
|
||||
# # tp_repr. WIBNI we had ::Inline? :)
|
||||
# pass
|
||||
|
||||
def test_class(self):
|
||||
self._check_path_limitations('bar')
|
||||
write_file(os.path.join(self.subpkgname, 'bar.py'), '''\
|
||||
class bar:
|
||||
pass
|
||||
''')
|
||||
importlib.invalidate_caches()
|
||||
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import bar
|
||||
# Module name may be prefixed with "test.", depending on how run.
|
||||
self.assertEqual(repr(bar.bar), "<class '%s.bar'>" % bar.__name__)
|
||||
# def test_class(self):
|
||||
# self._check_path_limitations('bar')
|
||||
# write_file(os.path.join(self.subpkgname, 'bar.py'), '''\
|
||||
# class bar:
|
||||
# pass
|
||||
# ''')
|
||||
# importlib.invalidate_caches()
|
||||
# from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import bar
|
||||
# # Module name may be prefixed with "test.", depending on how run.
|
||||
# self.assertEqual(repr(bar.bar), "<class '%s.bar'>" % bar.__name__)
|
||||
|
||||
def test_instance(self):
|
||||
self._check_path_limitations('baz')
|
||||
write_file(os.path.join(self.subpkgname, 'baz.py'), '''\
|
||||
class baz:
|
||||
pass
|
||||
''')
|
||||
importlib.invalidate_caches()
|
||||
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import baz
|
||||
ibaz = baz.baz()
|
||||
self.assertTrue(repr(ibaz).startswith(
|
||||
"<%s.baz object at 0x" % baz.__name__))
|
||||
# def test_instance(self):
|
||||
# self._check_path_limitations('baz')
|
||||
# write_file(os.path.join(self.subpkgname, 'baz.py'), '''\
|
||||
# class baz:
|
||||
# pass
|
||||
# ''')
|
||||
# importlib.invalidate_caches()
|
||||
# from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import baz
|
||||
# ibaz = baz.baz()
|
||||
# self.assertTrue(repr(ibaz).startswith(
|
||||
# "<%s.baz object at 0x" % baz.__name__))
|
||||
|
||||
def test_method(self):
|
||||
self._check_path_limitations('qux')
|
||||
eq = self.assertEqual
|
||||
write_file(os.path.join(self.subpkgname, 'qux.py'), '''\
|
||||
class aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:
|
||||
def amethod(self): pass
|
||||
''')
|
||||
importlib.invalidate_caches()
|
||||
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import qux
|
||||
# Unbound methods first
|
||||
r = repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod)
|
||||
self.assertTrue(r.startswith('<function aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod'), r)
|
||||
# Bound method next
|
||||
iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa()
|
||||
r = repr(iqux.amethod)
|
||||
self.assertTrue(r.startswith(
|
||||
'<bound method aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod of <%s.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa object at 0x' \
|
||||
% (qux.__name__,) ), r)
|
||||
# def test_method(self):
|
||||
# self._check_path_limitations('qux')
|
||||
# eq = self.assertEqual
|
||||
# write_file(os.path.join(self.subpkgname, 'qux.py'), '''\
|
||||
# class aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:
|
||||
# def amethod(self): pass
|
||||
# ''')
|
||||
# importlib.invalidate_caches()
|
||||
# from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import qux
|
||||
# # Unbound methods first
|
||||
# r = repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod)
|
||||
# self.assertTrue(r.startswith('<function aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod'), r)
|
||||
# # Bound method next
|
||||
# iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa()
|
||||
# r = repr(iqux.amethod)
|
||||
# self.assertTrue(r.startswith(
|
||||
# '<bound method aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod of <%s.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa object at 0x' \
|
||||
# % (qux.__name__,) ), r)
|
||||
|
||||
@unittest.skip('needs a built-in function with a really long name')
|
||||
def test_builtin_function(self):
|
||||
|
|
26
third_party/python/Lib/test/test_typing.py
vendored
26
third_party/python/Lib/test/test_typing.py
vendored
|
@ -2544,13 +2544,13 @@ class IOTests(BaseTestCase):
a = stuff.__annotations__['a']
self.assertEqual(a.__parameters__, ())

def test_io_submodule(self):
from typing.io import IO, TextIO, BinaryIO, __all__, __name__
self.assertIs(IO, typing.IO)
self.assertIs(TextIO, typing.TextIO)
self.assertIs(BinaryIO, typing.BinaryIO)
self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
self.assertEqual(__name__, 'typing.io')
# def test_io_submodule(self):
# from typing.io import IO, TextIO, BinaryIO, __all__, __name__
# self.assertIs(IO, typing.IO)
# self.assertIs(TextIO, typing.TextIO)
# self.assertIs(BinaryIO, typing.BinaryIO)
# self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
# self.assertEqual(__name__, 'typing.io')


class RETests(BaseTestCase):

@ -2603,12 +2603,12 @@ class RETests(BaseTestCase):
self.assertEqual(repr(Match[str]), 'Match[str]')
self.assertEqual(repr(Match[bytes]), 'Match[bytes]')

def test_re_submodule(self):
from typing.re import Match, Pattern, __all__, __name__
self.assertIs(Match, typing.Match)
self.assertIs(Pattern, typing.Pattern)
self.assertEqual(set(__all__), set(['Match', 'Pattern']))
self.assertEqual(__name__, 'typing.re')
# def test_re_submodule(self):
# from typing.re import Match, Pattern, __all__, __name__
# self.assertIs(Match, typing.Match)
# self.assertIs(Pattern, typing.Pattern)
# self.assertEqual(set(__all__), set(['Match', 'Pattern']))
# self.assertEqual(__name__, 'typing.re')

def test_cannot_subclass(self):
with self.assertRaises(TypeError) as ex:
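The two submodule tests above are commented out, presumably because the typing.io and typing.re pseudo-modules (which typing registers in sys.modules at import time) are pruned by tree-shaking; the objects they re-export remain plain attributes of typing itself. A minimal sketch of the surviving spelling, assuming the 3.6-era typing module vendored here:

import typing

# the wrapper namespaces only aliased attributes of typing, so code that
# used typing.io.* / typing.re.* can import the same objects directly
print(typing.IO, typing.TextIO, typing.BinaryIO)   # formerly typing.io.*
print(typing.Match, typing.Pattern)                # formerly typing.re.*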
13
third_party/python/Lib/test/test_unicode.py
vendored
@ -15,6 +15,7 @@ import sys
import unittest
import warnings
from test import support, string_tests
from encodings import utf_7, utf_16_le, utf_16_be, latin_1, unicode_internal, raw_unicode_escape

# Error handling (bad decoder return)
def search_function(encoding):

@ -2443,10 +2444,10 @@ class CAPITest(unittest.TestCase):
# Test PyUnicode_FromFormat()
def test_from_format(self):
support.import_module('ctypes')
from ctypes import (
pythonapi, py_object, sizeof,
c_int, c_long, c_longlong, c_ssize_t,
c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
# from ctypes import (
# pythonapi, py_object, sizeof,
# c_int, c_long, c_longlong, c_ssize_t,
# c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
name = "PyUnicode_FromFormat"
_PyUnicode_FromFormat = getattr(pythonapi, name)
_PyUnicode_FromFormat.restype = py_object

@ -2678,7 +2679,7 @@ class CAPITest(unittest.TestCase):
def test_aswidechar(self):
from _testcapi import unicode_aswidechar
support.import_module('ctypes')
from ctypes import c_wchar, sizeof
# from ctypes import c_wchar, sizeof

wchar, size = unicode_aswidechar('abcdef', 2)
self.assertEqual(size, 2)

@ -2716,7 +2717,7 @@ class CAPITest(unittest.TestCase):
def test_aswidecharstring(self):
from _testcapi import unicode_aswidecharstring
support.import_module('ctypes')
from ctypes import c_wchar, sizeof
# from ctypes import c_wchar, sizeof

wchar, size = unicode_aswidecharstring('abc')
self.assertEqual(size, 3)

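These hunks keep the support.import_module('ctypes') guard while commenting out the function-level from ctypes import ... lines: a static import scan no longer sees ctypes at all, and on a build without ctypes the guard turns each test into a clean skip rather than a failure. A minimal sketch of that skip-if-unavailable pattern, not taken from this diff:

import unittest
from test import support

class Example(unittest.TestCase):
    def test_needs_ctypes(self):
        # raises unittest.SkipTest (reported as a skip) when ctypes is missing
        support.import_module('ctypes')
        from ctypes import sizeof, c_int
        self.assertGreaterEqual(sizeof(c_int), 2)

if __name__ == '__main__':
    unittest.main()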
@ -493,7 +493,7 @@ class TestUrlopen(unittest.TestCase):
def start_https_server(self, responses=None, **kwargs):
if not hasattr(urllib.request, 'HTTPSHandler'):
self.skipTest('ssl support required')
from test.ssl_servers import make_https_server
# from test.ssl_servers import make_https_server
if responses is None:
responses = [(200, [], b"we care a bit")]
handler = GetRequestHandler(responses)
0
third_party/python/Lib/test/test_warnings/data/__init__.py
vendored
Normal file
1
third_party/python/Lib/test/test_xmlrpc.py
vendored
@ -14,6 +14,7 @@ import re
import io
import contextlib
from test import support
from encodings import iso8859_15

try:
import gzip
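The added from encodings import iso8859_15 (like the encodings imports added to test_unicode above) makes a codec dependency visible to static analysis: codec lookup normally happens by string at runtime, which a tree shaker cannot see. A small illustrative sketch, not from the diff:

import codecs
# an explicit import pins the codec module into the linked set ...
from encodings import iso8859_15
# ... whereas lookup by name alone resolves lazily and is invisible to a
# static import scan
info = codecs.lookup('iso8859-15')
print(info.name)                     # 'iso8859-15'
print('€'.encode('iso8859-15'))      # b'\xa4' (euro sign lives at 0xA4)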
7
third_party/python/Lib/tokenize.py
vendored
@ -32,16 +32,11 @@ from itertools import chain
import itertools as _itertools
import re
import sys
from token import *
from token import AMPER, AMPEREQUAL, ASYNC, AT, ATEQUAL, AWAIT, CIRCUMFLEX, CIRCUMFLEXEQUAL, COLON, COMMA, DEDENT, DOT, DOUBLESLASH, DOUBLESLASHEQUAL, DOUBLESTAR, DOUBLESTAREQUAL, ELLIPSIS, ENDMARKER, EQEQUAL, EQUAL, ERRORTOKEN, GREATER, GREATEREQUAL, INDENT, ISEOF, ISNONTERMINAL, ISTERMINAL, LBRACE, LEFTSHIFT, LEFTSHIFTEQUAL, LESS, LESSEQUAL, LPAR, LSQB, MINEQUAL, MINUS, NAME, NEWLINE, NOTEQUAL, NT_OFFSET, NUMBER, N_TOKENS, OP, PERCENT, PERCENTEQUAL, PLUS, PLUSEQUAL, RARROW, RBRACE, RIGHTSHIFT, RIGHTSHIFTEQUAL, RPAR, RSQB, SEMI, SLASH, SLASHEQUAL, STAR, STAREQUAL, STRING, TILDE, VBAR, VBAREQUAL, tok_name

cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)

import token
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
"NL", "untokenize", "ENCODING", "TokenInfo"]
del token

COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
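Replacing from token import * with an explicit name list is the core tree-shaking move in this commit: a star import forces the analyzer to assume every public name might be needed, while an explicit list states the dependency exactly. One way such a list could be generated mechanically (a sketch, not the tool actually used here):

import token

# enumerate the public names a star import would have bound, then prune
# anything the module never actually uses
names = sorted(n for n in dir(token) if not n.startswith('_'))
print('from token import ' + ', '.join(names))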
8
third_party/python/Lib/unittest/__init__.py
vendored
@ -48,10 +48,8 @@ __all__ = ['TestResult', 'TestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', 'installHandler',
'registerResult', 'removeResult', 'removeHandler']

# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
'registerResult', 'removeResult', 'removeHandler',
'getTestCaseNames', 'makeSuite', 'findTestCases']

__unittest = True

@ -61,7 +59,7 @@ from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)
from .main import TestProgram, main
from ._main import TestProgram, main
from .runner import TextTestRunner, TextTestResult
from .signals import installHandler, registerResult, removeResult, removeHandler
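Folding the obsolete aliases into the __all__ literal, instead of mutating it afterwards with __all__.extend(...), keeps the export list statically analyzable: a shaker can read one list literal rather than simulate runtime calls. A rough sketch of how a tool might test for that property (hypothetical helper, using only the stdlib ast module; ast.Str is the node type on the 3.6-era tree vendored here):

import ast

def has_static_all(source):
    """Return True if the module binds __all__ exactly once, to a plain
    list/tuple of string literals, with no later mutation of it."""
    tree = ast.parse(source)
    assigns = []
    for node in ast.walk(tree):
        if isinstance(node, ast.Assign) and any(
                isinstance(t, ast.Name) and t.id == '__all__'
                for t in node.targets):
            assigns.append(node)
        if (isinstance(node, ast.Call)
                and isinstance(node.func, ast.Attribute)
                and isinstance(node.func.value, ast.Name)
                and node.func.value.id == '__all__'):
            return False    # __all__.extend(...) and friends defeat static analysis
        if (isinstance(node, ast.AugAssign)
                and isinstance(node.target, ast.Name)
                and node.target.id == '__all__'):
            return False    # __all__ += [...] likewise
    return (len(assigns) == 1
            and isinstance(assigns[0].value, (ast.List, ast.Tuple))
            and all(isinstance(e, ast.Str) for e in assigns[0].value.elts))

print(has_static_all("__all__ = ['a', 'b']"))                    # True
print(has_static_all("__all__ = ['a']\n__all__.extend(['b'])"))  # False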
2
third_party/python/Lib/unittest/__main__.py
vendored
@ -13,6 +13,6 @@ if sys.argv[0].endswith("__main__.py"):

__unittest = True

from .main import main, TestProgram
from ._main import main, TestProgram

main(module=None)
5
third_party/python/Lib/unittest/mock.py
vendored
@ -35,11 +35,6 @@ from functools import wraps, partial
_builtins = {name for name in dir(builtins) if not name.startswith('_')}

BaseExceptions = (BaseException,)
if 'java' in sys.platform:
# jython
import java
BaseExceptions = (BaseException, java.lang.Throwable)


FILTER_DIR = True
161
third_party/python/Lib/urllib/request.py
vendored
@ -131,6 +131,8 @@ __all__ = [
'pathname2url', 'url2pathname', 'getproxies',
# Legacy interface
'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener',
# wut
'HTTPError',
]

# used in User-Agent header sent

@ -2611,160 +2613,5 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):

return False


if sys.platform == 'darwin':
from _scproxy import _get_proxy_settings, _get_proxies

def proxy_bypass_macosx_sysconf(host):
proxy_settings = _get_proxy_settings()
return _proxy_bypass_macosx_sysconf(host, proxy_settings)

def getproxies_macosx_sysconf():
"""Return a dictionary of scheme -> proxy server URL mappings.

This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
return _get_proxies()


def proxy_bypass(host):
"""Return True, if host should be bypassed.

Checks proxy settings gathered from the environment, if specified,
or from the MacOSX framework SystemConfiguration.

"""
proxies = getproxies_environment()
if proxies:
return proxy_bypass_environment(host, proxies)
else:
return proxy_bypass_macosx_sysconf(host)

def getproxies():
return getproxies_environment() or getproxies_macosx_sysconf()


elif os.name == 'nt':
def getproxies_registry():
"""Return a dictionary of scheme -> proxy server URL mappings.

Win32 uses the registry to store proxies.

"""
proxies = {}
try:
import winreg
except ImportError:
# Std module, so should be around - but you never know!
return proxies
try:
internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
if proxyEnable:
# Returned as Unicode but problems if not converted to ASCII
proxyServer = str(winreg.QueryValueEx(internetSettings,
'ProxyServer')[0])
if '=' in proxyServer:
# Per-protocol settings
for p in proxyServer.split(';'):
protocol, address = p.split('=', 1)
# See if address has a type:// prefix
if not re.match('^([^/:]+)://', address):
address = '%s://%s' % (protocol, address)
proxies[protocol] = address
else:
# Use one setting for all protocols
if proxyServer[:5] == 'http:':
proxies['http'] = proxyServer
else:
proxies['http'] = 'http://%s' % proxyServer
proxies['https'] = 'https://%s' % proxyServer
proxies['ftp'] = 'ftp://%s' % proxyServer
internetSettings.Close()
except (OSError, ValueError, TypeError):
# Either registry key not found etc, or the value in an
# unexpected format.
# proxies already set up to be empty so nothing to do
pass
return proxies

def getproxies():
"""Return a dictionary of scheme -> proxy server URL mappings.

Returns settings gathered from the environment, if specified,
or the registry.

"""
return getproxies_environment() or getproxies_registry()

def proxy_bypass_registry(host):
try:
import winreg
except ImportError:
# Std modules, so should be around - but you never know!
return 0
try:
internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
proxyOverride = str(winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0])
# ^^^^ Returned as Unicode but problems if not converted to ASCII
except OSError:
return 0
if not proxyEnable or not proxyOverride:
return 0
# try to make a host list from name and IP address.
rawHost, port = splitport(host)
host = [rawHost]
try:
addr = socket.gethostbyname(rawHost)
if addr != rawHost:
host.append(addr)
except OSError:
pass
try:
fqdn = socket.getfqdn(rawHost)
if fqdn != rawHost:
host.append(fqdn)
except OSError:
pass
# make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding
# canonical entry.
proxyOverride = proxyOverride.split(';')
# now check if we match one of the registry values.
for test in proxyOverride:
if test == '<local>':
if '.' not in rawHost:
return 1
test = test.replace(".", r"\.") # mask dots
test = test.replace("*", r".*") # change glob sequence
test = test.replace("?", r".") # change glob char
for val in host:
if re.match(test, val, re.I):
return 1
return 0

def proxy_bypass(host):
"""Return True, if host should be bypassed.

Checks proxy settings gathered from the environment, if specified,
or the registry.

"""
proxies = getproxies_environment()
if proxies:
return proxy_bypass_environment(host, proxies)
else:
return proxy_bypass_registry(host)

else:
# By default use environment variables
getproxies = getproxies_environment
proxy_bypass = proxy_bypass_environment
getproxies = getproxies_environment
proxy_bypass = proxy_bypass_environment
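With the Darwin and Windows branches removed, getproxies and proxy_bypass always resolve to their environment-variable implementations. A quick sketch of what callers can still expect from that reduced surface (standard urllib behaviour, shown only to illustrate the retained code path; assumes no other *_proxy variables are already set):

import os
import urllib.request

os.environ['http_proxy'] = 'http://127.0.0.1:3128'
print(urllib.request.getproxies())
# {'http': 'http://127.0.0.1:3128'} -- read from the environment at call time

os.environ['no_proxy'] = 'localhost,.internal.example'
print(bool(urllib.request.proxy_bypass('localhost')))
# True: 'localhost' matches the no_proxy list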
2
third_party/python/Lib/uuid.py
vendored
@ -444,7 +444,7 @@ def _ipconfig_getnode():
def _netbios_getnode():
"""Get the hardware address on Windows using NetBIOS calls.
See http://support.microsoft.com/kb/118623 for details."""
import win32wnet, netbios
# import win32wnet, netbios
ncb = netbios.NCB()
ncb.Command = netbios.NCBENUM
ncb.Buffer = adapters = netbios.LANA_ENUM()
22
third_party/python/Lib/xml/parsers/expat.py
vendored
@ -1,7 +1,27 @@
"""Interface to the Expat non-validating XML parser."""

__all__ = [
'EXPAT_VERSION',
'ErrorString',
'ExpatError',
'ParserCreate',
'XMLParserType',
'XML_PARAM_ENTITY_PARSING_ALWAYS',
'XML_PARAM_ENTITY_PARSING_NEVER',
'XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE',
'error',
'errors',
'expat_CAPI',
'features',
'model',
'native_encoding',
'sys',
'version_info',
]

import sys

from pyexpat import *
from pyexpat import EXPAT_VERSION, ErrorString, ExpatError, ParserCreate, XMLParserType, XML_PARAM_ENTITY_PARSING_ALWAYS, XML_PARAM_ENTITY_PARSING_NEVER, XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE, error, errors, expat_CAPI, features, model, native_encoding, version_info

# provide pyexpat submodules as xml.parsers.expat submodules
sys.modules['xml.parsers.expat.model'] = model
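Pinning an explicit __all__ next to the explicit from pyexpat import ... spells out exactly what this wrapper re-exports (the list appears to be generated from the module namespace, which would explain why 'sys' shows up in it), so the shaker gets a fixed symbol list instead of an opaque star import. A small usage sketch of the public API that stays exported:

from xml.parsers.expat import ParserCreate, ExpatError

parser = ParserCreate()
parser.StartElementHandler = lambda name, attrs: print('start', name, attrs)
try:
    parser.Parse(b'<root a="1"><child/></root>', True)
except ExpatError as exc:
    print('parse failed:', exc)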
2
third_party/python/Lib/xml/sax/__init__.py
vendored
@ -95,7 +95,7 @@ def make_parser(parser_list = []):

if sys.platform[ : 4] == "java":
def _create_parser(parser_name):
from org.python.core import imp
# from org.python.core import imp
drv_module = imp.importName(parser_name, 0, globals())
return drv_module.create_parser()
@ -1,8 +1,4 @@
"""Different kinds of SAX Exceptions"""
import sys
if sys.platform[:4] == "java":
from java.lang import Exception
del sys

# ===== SAXEXCEPTION =====