2015-05-03 09:15:18 +08:00
|
|
|
"""Core implementation of path-based import.
|
|
|
|
|
|
|
|
This module is NOT meant to be directly imported! It has been designed such
|
|
|
|
that it can be bootstrapped into Python as the implementation of import. As
|
|
|
|
such it requires the injection of specific modules and attributes in order to
|
|
|
|
work. One should use importlib as the public-facing version of this module.
|
|
|
|
|
|
|
|
"""
|
2018-01-16 07:07:11 +08:00
|
|
|
# IMPORTANT: Whenever making changes to this module, be sure to run a top-level
|
|
|
|
# `make regen-importlib` followed by `make` in order to get the frozen version
|
|
|
|
# of the module updated. Not doing so will result in the Makefile to fail for
|
|
|
|
# all others who don't have a ./python around to freeze the module in the early
|
|
|
|
# stages of compilation.
|
2015-05-03 09:15:18 +08:00
|
|
|
#
|
|
|
|
|
|
|
|
# See importlib._setup() for what is injected into the global namespace.
|
|
|
|
|
|
|
|
# When editing this code be aware that code executed at import time CANNOT
|
|
|
|
# reference any injected objects! This includes not only global code but also
|
|
|
|
# anything specified at the class level.
|
|
|
|
|
2020-11-19 20:43:43 +08:00
|
|
|
# Module injected manually by _set_bootstrap_module()
|
|
|
|
_bootstrap = None
|
|
|
|
|
|
|
|
# Import builtin modules
|
|
|
|
import _imp
|
|
|
|
import _io
|
|
|
|
import sys
|
|
|
|
import _warnings
|
|
|
|
import marshal
|
|
|
|
|
|
|
|
|
|
|
|
# Platform detection happens exactly once, at bootstrap time; everything
# below (OS module choice, separators) keys off this flag.
_MS_WINDOWS = (sys.platform == 'win32')
if _MS_WINDOWS:
    import nt as _os
    import winreg
else:
    import posix as _os


if _MS_WINDOWS:
    # '\\' is listed first so it becomes the canonical path_sep below.
    path_separators = ['\\', '/']
else:
    path_separators = ['/']
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
# Canonical separator used when building paths.
path_sep = path_separators[0]
path_sep_tuple = tuple(path_separators)
# Rebound to a string of all separator characters, suitable for
# str.strip()/rstrip() and `in` membership tests.
path_separators = ''.join(path_separators)
_pathseps_with_colon = {f':{s}' for s in path_separators}
|
|
|
|
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
# Bootstrap-related code ######################################################

# Platform prefixes (matched against sys.platform) where filename case is
# relaxed, split by whether os.environ keys are str or bytes on that platform.
_CASE_INSENSITIVE_PLATFORMS_STR_KEY = 'win',
_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY = 'cygwin', 'darwin', 'ios', 'tvos', 'watchos'
_CASE_INSENSITIVE_PLATFORMS = (_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY
                               + _CASE_INSENSITIVE_PLATFORMS_STR_KEY)


def _make_relax_case():
    # Factory: the platform check runs once here, and the returned closure
    # does only the cheap per-call work (env lookup or a constant False).
    if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
        if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):
            key = 'PYTHONCASEOK'
        else:
            # On these platforms environ keys are bytes, so the lookup key
            # must be bytes as well.
            key = b'PYTHONCASEOK'

        def _relax_case():
            """True if filenames must be checked case-insensitively and ignore environment flags are not set."""
            return not sys.flags.ignore_environment and key in _os.environ
    else:
        def _relax_case():
            """True if filenames must be checked case-insensitively."""
            return False
    return _relax_case


# Bind the platform-appropriate closure at module level.
_relax_case = _make_relax_case()
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
|
2018-09-19 03:22:29 +08:00
|
|
|
def _pack_uint32(x):
|
2015-05-03 09:15:18 +08:00
|
|
|
"""Convert a 32-bit integer to little-endian."""
|
|
|
|
return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
|
|
|
|
|
|
|
|
|
2024-03-28 14:54:51 +08:00
|
|
|
def _unpack_uint64(data):
|
|
|
|
"""Convert 8 bytes in little-endian to an integer."""
|
|
|
|
assert len(data) == 8
|
|
|
|
return int.from_bytes(data, 'little')
|
|
|
|
|
2018-09-19 03:22:29 +08:00
|
|
|
def _unpack_uint32(data):
|
2015-05-03 09:15:18 +08:00
|
|
|
"""Convert 4 bytes in little-endian to an integer."""
|
2018-09-19 03:22:29 +08:00
|
|
|
assert len(data) == 4
|
|
|
|
return int.from_bytes(data, 'little')
|
|
|
|
|
|
|
|
def _unpack_uint16(data):
|
|
|
|
"""Convert 2 bytes in little-endian to an integer."""
|
|
|
|
assert len(data) == 2
|
|
|
|
return int.from_bytes(data, 'little')
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
2021-04-07 08:02:07 +08:00
|
|
|
if _MS_WINDOWS:
    def _path_join(*path_parts):
        """Replacement for os.path.join().

        Handles Windows drive letters, UNC roots, and drive-relative
        paths via _os._path_splitroot().
        """
        if not path_parts:
            return ""
        if len(path_parts) == 1:
            return path_parts[0]
        root = ""
        path = []
        for new_root, tail in map(_os._path_splitroot, path_parts):
            if new_root.startswith(path_sep_tuple) or new_root.endswith(path_sep_tuple):
                # Rooted part (e.g. '\\foo' or a UNC share): restart the
                # accumulated path from this root.
                root = new_root.rstrip(path_separators) or root
                path = [path_sep + tail]
            elif new_root.endswith(':'):
                if root.casefold() != new_root.casefold():
                    # Drive relative paths have to be resolved by the OS, so we reset the
                    # tail but do not add a path_sep prefix.
                    root = new_root
                    path = [tail]
                else:
                    # Same drive as before: treat the tail as relative.
                    path.append(tail)
            else:
                root = new_root or root
                path.append(tail)
        path = [p.rstrip(path_separators) for p in path if p]
        if len(path) == 1 and not path[0]:
            # Avoid losing the root's trailing separator when joining with nothing
            return root + path_sep
        return root + path_sep.join(path)


else:
    def _path_join(*path_parts):
        """Replacement for os.path.join()."""
        # Drop empty parts and trailing separators, then join with the
        # single POSIX separator.
        return path_sep.join([part.rstrip(path_separators)
                              for part in path_parts if part])
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
def _path_split(path):
    """Replacement for os.path.split()."""
    # Locate the right-most occurrence of any separator character.
    split_at = -1
    for sep in path_separators:
        split_at = max(split_at, path.rfind(sep))
    if split_at < 0:
        # No separator at all: everything is the tail.
        return '', path
    return path[:split_at], path[split_at + 1:]
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
def _path_stat(path):
    """Stat the path.

    Made a separate function to make it easier to override in experiments
    (e.g. cache stat results).

    """
    # Thin wrapper over the injected OS module (nt or posix); propagates
    # OSError to callers, which treat it as "path not usable".
    return _os.stat(path)
|
|
|
|
|
|
|
|
|
|
|
|
def _path_is_mode_type(path, mode):
    """Test whether the path is the specified mode type."""
    try:
        st = _path_stat(path)
    except OSError:
        # Unstat-able paths (missing, permission denied, ...) are simply
        # "not of this type".
        return False
    # 0o170000 is the S_IFMT file-type mask.
    return (st.st_mode & 0o170000) == mode
|
|
|
|
|
|
|
|
|
|
|
|
def _path_isfile(path):
    """Replacement for os.path.isfile."""
    # 0o100000 is S_IFREG (regular file).
    regular_file = 0o100000
    return _path_is_mode_type(path, regular_file)
|
|
|
|
|
|
|
|
|
|
|
|
def _path_isdir(path):
    """Replacement for os.path.isdir."""
    # An empty path means the current working directory.
    target = path or _os.getcwd()
    # 0o040000 is S_IFDIR (directory).
    return _path_is_mode_type(target, 0o040000)
|
|
|
|
|
|
|
|
|
2021-04-07 08:02:07 +08:00
|
|
|
if _MS_WINDOWS:
    def _path_isabs(path):
        """Replacement for os.path.isabs."""
        if not path:
            return False
        # Normalize the root to backslashes, then require either a UNC
        # prefix ('\\\\server') or a drive root ending in '\\' (e.g. 'C:\\').
        root = _os._path_splitroot(path)[0].replace('/', '\\')
        return len(root) > 1 and (root.startswith('\\\\') or root.endswith('\\'))

else:
    def _path_isabs(path):
        """Replacement for os.path.isabs."""
        # On POSIX an absolute path simply starts with a separator.
        return path.startswith(path_separators)
|
2018-06-16 12:40:56 +08:00
|
|
|
|
|
|
|
|
2022-10-07 06:40:22 +08:00
|
|
|
def _path_abspath(path):
    """Replacement for os.path.abspath."""
    if not _path_isabs(path):
        # Drop a leading './' (or '.\\' on Windows) before anchoring the
        # path at the current working directory.
        for sep in path_separators:
            path = path.removeprefix(f".{sep}")
        return _path_join(_os.getcwd(), path)
    else:
        return path
|
|
|
|
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
def _write_atomic(path, data, mode=0o666):
    """Best-effort function to write data to a path atomically.
    Be prepared to handle a FileExistsError if concurrent writing of the
    temporary file is attempted."""
    # id() is used to generate a pseudo-random filename.
    path_tmp = f'{path}.{id(path)}'
    # O_EXCL makes open() fail (FileExistsError) rather than clobber a
    # concurrent writer's temp file; exec bits are masked off the mode.
    fd = _os.open(path_tmp,
                  _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666)
    try:
        # We first write data to a temporary file, and then use os.replace() to
        # perform an atomic rename.
        with _io.FileIO(fd, 'wb') as file:
            file.write(data)
        _os.replace(path_tmp, path)
    except OSError:
        # Clean up the temp file on any failure, then re-raise.
        try:
            _os.unlink(path_tmp)
        except OSError:
            pass
        raise
|
|
|
|
|
|
|
|
|
|
|
|
# The concrete code-object type, captured from a real function so it need
# not be imported (this module cannot rely on injected objects at import time).
_code_type = type(_write_atomic.__code__)


# First 4 bytes of every pyc file; derived from the interpreter's magic token.
MAGIC_NUMBER = _imp.pyc_magic_number_token.to_bytes(4, 'little')

# PEP 3147 cache directory name and PEP 488 optimization-tag prefix.
_PYCACHE = '__pycache__'
_OPT = 'opt-'

SOURCE_SUFFIXES = ['.py']
if _MS_WINDOWS:
    SOURCE_SUFFIXES.append('.pyw')

EXTENSION_SUFFIXES = _imp.extension_suffixes()

BYTECODE_SUFFIXES = ['.pyc']
# Deprecated.
DEBUG_BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES = BYTECODE_SUFFIXES
|
|
|
|
|
|
|
|
def cache_from_source(path, debug_override=None, *, optimization=None):
    """Given the path to a .py file, return the path to its .pyc file.

    The .py file does not need to exist; this simply returns the path to the
    .pyc file calculated as if the .py file were imported.

    The 'optimization' parameter controls the presumed optimization level of
    the bytecode file. If 'optimization' is not None, the string representation
    of the argument is taken and verified to be alphanumeric (else ValueError
    is raised).

    The debug_override parameter is deprecated. If debug_override is not None,
    a True value is the same as setting 'optimization' to the empty string
    while a False value is equivalent to setting 'optimization' to '1'.

    If sys.implementation.cache_tag is None then NotImplementedError is raised.

    """
    if debug_override is not None:
        _warnings.warn('the debug_override parameter is deprecated; use '
                       "'optimization' instead", DeprecationWarning)
        if optimization is not None:
            message = 'debug_override or optimization must be set to None'
            raise TypeError(message)
        # Map legacy debug flag onto the optimization tag.
        optimization = '' if debug_override else 1
    path = _os.fspath(path)
    head, tail = _path_split(path)
    base, sep, rest = tail.rpartition('.')
    tag = sys.implementation.cache_tag
    if tag is None:
        raise NotImplementedError('sys.implementation.cache_tag is None')
    # e.g. 'foo' + '.' + 'cpython-313'; if the name has no dot, 'rest'
    # holds the whole filename and sep is ''.
    almost_filename = ''.join([(base if base else rest), sep, tag])
    if optimization is None:
        # Default to the interpreter's current optimization level.
        if sys.flags.optimize == 0:
            optimization = ''
        else:
            optimization = sys.flags.optimize
    optimization = str(optimization)
    if optimization != '':
        if not optimization.isalnum():
            raise ValueError(f'{optimization!r} is not alphanumeric')
        almost_filename = f'{almost_filename}.{_OPT}{optimization}'
    filename = almost_filename + BYTECODE_SUFFIXES[0]
    if sys.pycache_prefix is not None:
        # We need an absolute path to the py file to avoid the possibility of
        # collisions within sys.pycache_prefix, if someone has two different
        # `foo/bar.py` on their system and they import both of them using the
        # same sys.pycache_prefix. Let's say sys.pycache_prefix is
        # `C:\Bytecode`; the idea here is that if we get `Foo\Bar`, we first
        # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative
        # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an
        # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`.
        head = _path_abspath(head)

        # Strip initial drive from a Windows path. We know we have an absolute
        # path here, so the second part of the check rules out a POSIX path that
        # happens to contain a colon at the second character.
        # NOTE(review): head[1] assumes the absolute path has length >= 2;
        # confirm a one-character head (e.g. '/') cannot reach this point.
        if head[1] == ':' and head[0] not in path_separators:
            head = head[2:]

        # Strip initial path separator from `head` to complete the conversion
        # back to a root-relative path before joining.
        return _path_join(
            sys.pycache_prefix,
            head.lstrip(path_separators),
            filename,
        )
    return _path_join(head, _PYCACHE, filename)
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
def source_from_cache(path):
    """Given the path to a .pyc. file, return the path to its .py file.

    The .pyc file does not need to exist; this simply returns the path to
    the .py file calculated to correspond to the .pyc file. If path does
    not conform to PEP 3147/488 format, ValueError will be raised. If
    sys.implementation.cache_tag is None then NotImplementedError is raised.

    """
    if sys.implementation.cache_tag is None:
        raise NotImplementedError('sys.implementation.cache_tag is None')
    path = _os.fspath(path)
    head, pycache_filename = _path_split(path)
    found_in_pycache_prefix = False
    if sys.pycache_prefix is not None:
        # If the pyc lives under sys.pycache_prefix, the source path is the
        # remainder of the path after that prefix.
        stripped_path = sys.pycache_prefix.rstrip(path_separators)
        if head.startswith(stripped_path + path_sep):
            head = head[len(stripped_path):]
            found_in_pycache_prefix = True
    if not found_in_pycache_prefix:
        # Classic layout: the pyc must sit directly inside a __pycache__
        # directory next to the source.
        head, pycache = _path_split(head)
        if pycache != _PYCACHE:
            raise ValueError(f'{_PYCACHE} not bottom-level directory in '
                             f'{path!r}')
    # Valid names: 'name.tag.pyc' (2 dots) or 'name.tag.opt-N.pyc' (3 dots).
    dot_count = pycache_filename.count('.')
    if dot_count not in {2, 3}:
        raise ValueError(f'expected only 2 or 3 dots in {pycache_filename!r}')
    elif dot_count == 3:
        optimization = pycache_filename.rsplit('.', 2)[-2]
        if not optimization.startswith(_OPT):
            raise ValueError("optimization portion of filename does not start "
                             f"with {_OPT!r}")
        opt_level = optimization[len(_OPT):]
        if not opt_level.isalnum():
            raise ValueError(f"optimization level {optimization!r} is not an "
                             "alphanumeric value")
    base_filename = pycache_filename.partition('.')[0]
    return _path_join(head, base_filename + SOURCE_SUFFIXES[0])
|
|
|
|
|
|
|
|
|
|
|
|
def _get_sourcefile(bytecode_path):
    """Convert a bytecode file path to a source path (if possible).

    This function exists purely for backwards-compatibility for
    PyImport_ExecCodeModuleWithFilenames() in the C API.

    """
    if len(bytecode_path) == 0:
        return None
    rest, _, extension = bytecode_path.rpartition('.')
    # Only attempt conversion for suffixes whose middle letters are 'py'
    # (matches e.g. '.pyc'); anything else is returned unchanged.
    if not rest or extension.lower()[-3:-1] != 'py':
        return bytecode_path
    try:
        source_path = source_from_cache(bytecode_path)
    except (NotImplementedError, ValueError):
        # Not a PEP 3147 layout: fall back to dropping the trailing 'c'.
        source_path = bytecode_path[:-1]
    # Prefer the source only if it actually exists on disk.
    return source_path if _path_isfile(source_path) else bytecode_path
|
|
|
|
|
|
|
|
|
|
|
|
def _get_cached(filename):
    """Return the cached bytecode path for *filename*, or None."""
    source_exts = tuple(SOURCE_SUFFIXES)
    bytecode_exts = tuple(BYTECODE_SUFFIXES)
    if filename.endswith(source_exts):
        try:
            return cache_from_source(filename)
        except NotImplementedError:
            # No cache tag available: fall through and return None.
            pass
    elif filename.endswith(bytecode_exts):
        # Already a bytecode file; it is its own cache.
        return filename
    else:
        return None
|
|
|
|
|
|
|
|
|
|
|
|
def _calc_mode(path):
    """Calculate the mode permissions for a bytecode file."""
    try:
        mode_bits = _path_stat(path).st_mode
    except OSError:
        # Source not stat-able: fall back to a permissive default.
        mode_bits = 0o666
    # We always ensure write access so we can update cached files
    # later even when the source files are read-only on Windows (#6074)
    return mode_bits | 0o200
|
|
|
|
|
|
|
|
|
|
|
|
def _check_name(method):
    """Decorator to verify that the module being requested matches the one the
    loader can handle.

    The first argument (self) must define _name which the second argument is
    compared against. If the comparison fails then ImportError is raised.

    """
    def _check_name_wrapper(self, name=None, *args, **kwargs):
        if name is None:
            # Default to the loader's own module name.
            name = self.name
        elif self.name != name:
            raise ImportError('loader for %s cannot handle %s' %
                              (self.name, name), name=name)
        return method(self, name, *args, **kwargs)

    # FIXME: @_check_name is used to define class methods before the
    # _bootstrap module is set by _set_bootstrap_module().
    if _bootstrap is not None:
        _wrap = _bootstrap._wrap
    else:
        # Minimal local stand-in for _bootstrap._wrap: copy function
        # metadata from the wrapped method onto the wrapper.
        def _wrap(new, old):
            for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
                if hasattr(old, replace):
                    setattr(new, replace, getattr(old, replace))
            new.__dict__.update(old.__dict__)

    _wrap(_check_name_wrapper, method)
    return _check_name_wrapper
|
|
|
|
|
|
|
|
|
2017-12-10 02:26:52 +08:00
|
|
|
def _classify_pyc(data, name, exc_details):
    """Perform basic validity checking of a pyc header and return the flags field,
    which determines how the pyc should be further validated against the source.

    *data* is the contents of the pyc file. (Only the first 16 bytes are
    required, though.)

    *name* is the name of the module being imported. It is used for logging.

    *exc_details* is a dictionary passed to ImportError if it raised for
    improved debugging.

    ImportError is raised when the magic number is incorrect or when the flags
    field is invalid. EOFError is raised when the data is found to be truncated.

    """
    magic = data[:4]
    if magic != MAGIC_NUMBER:
        message = f'bad magic number in {name!r}: {magic!r}'
        _bootstrap._verbose_message('{}', message)
        raise ImportError(message, **exc_details)
    # The full header is magic (4) + flags (4) + two 4-byte fields.
    if len(data) < 16:
        message = f'reached EOF while reading pyc header of {name!r}'
        _bootstrap._verbose_message('{}', message)
        raise EOFError(message)
    flags = _unpack_uint32(data[4:8])
    # Only the first two flags are defined.
    if flags & ~0b11:
        message = f'invalid flags {flags!r} in {name!r}'
        raise ImportError(message, **exc_details)
    return flags
|
|
|
|
|
|
|
|
|
|
|
|
def _validate_timestamp_pyc(data, source_mtime, source_size, name,
                            exc_details):
    """Validate a pyc against the source last-modified time.

    *data* is the contents of the pyc file. (Only the first 16 bytes are
    required.)

    *source_mtime* is the last modified timestamp of the source file.

    *source_size* is None or the size of the source file in bytes.

    *name* is the name of the module being imported. It is used for logging.

    *exc_details* is a dictionary passed to ImportError if it raised for
    improved debugging.

    An ImportError is raised if the bytecode is stale.

    """
    # Both fields are stored as 32-bit values, so compare against the
    # low 32 bits of the real mtime/size.
    if _unpack_uint32(data[8:12]) != (source_mtime & 0xFFFFFFFF):
        message = f'bytecode is stale for {name!r}'
        _bootstrap._verbose_message('{}', message)
        raise ImportError(message, **exc_details)
    if (source_size is not None and
            _unpack_uint32(data[12:16]) != (source_size & 0xFFFFFFFF)):
        raise ImportError(f'bytecode is stale for {name!r}', **exc_details)
|
|
|
|
|
|
|
|
|
|
|
|
def _validate_hash_pyc(data, source_hash, name, exc_details):
|
|
|
|
"""Validate a hash-based pyc by checking the real source hash against the one in
|
|
|
|
the pyc header.
|
|
|
|
|
|
|
|
*data* is the contents of the pyc file. (Only the first 16 bytes are
|
|
|
|
required.)
|
|
|
|
|
|
|
|
*source_hash* is the importlib.util.source_hash() of the source file.
|
|
|
|
|
|
|
|
*name* is the name of the module being imported. It is used for logging.
|
|
|
|
|
|
|
|
*exc_details* is a dictionary passed to ImportError if it raised for
|
|
|
|
improved debugging.
|
|
|
|
|
|
|
|
An ImportError is raised if the bytecode is stale.
|
|
|
|
|
|
|
|
"""
|
|
|
|
if data[8:16] != source_hash:
|
|
|
|
raise ImportError(
|
|
|
|
f'hash in bytecode doesn\'t match hash of source {name!r}',
|
|
|
|
**exc_details,
|
|
|
|
)
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
    """Compile bytecode as found in a pyc."""
    # *data* is the marshalled body (header already stripped by the caller).
    code = marshal.loads(data)
    if isinstance(code, _code_type):
        _bootstrap._verbose_message('code object from {!r}', bytecode_path)
        if source_path is not None:
            # Rewrite co_filename so tracebacks point at the source file.
            _imp._fix_co_filename(code, source_path)
        return code
    else:
        raise ImportError(f'Non-code object in {bytecode_path!r}',
                          name=name, path=bytecode_path)
|
|
|
|
|
2017-12-10 02:26:52 +08:00
|
|
|
|
|
|
|
def _code_to_timestamp_pyc(code, mtime=0, source_size=0):
    "Produce the data for a timestamp-based pyc."
    buf = bytearray(MAGIC_NUMBER)
    # Header fields after the magic: flags (0 = timestamp-based),
    # source mtime, source size.
    for field in (0, mtime, source_size):
        buf.extend(_pack_uint32(field))
    buf.extend(marshal.dumps(code))
    return buf
|
|
|
|
|
|
|
|
|
2017-12-10 02:26:52 +08:00
|
|
|
def _code_to_hash_pyc(code, source_hash, checked=True):
    "Produce the data for a hash-based pyc."
    assert len(source_hash) == 8
    # Bit 0 marks the pyc as hash-based; bit 1 marks it as check_source.
    flags = 0b1 | checked << 1
    buf = bytearray(MAGIC_NUMBER)
    buf.extend(_pack_uint32(flags))
    buf.extend(source_hash)
    buf.extend(marshal.dumps(code))
    return buf
|
|
|
|
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
def decode_source(source_bytes):
    """Decode bytes representing source code and return the string.

    Universal newline support is used in the decoding.
    """
    import tokenize  # To avoid bootstrap issues.
    readline = _io.BytesIO(source_bytes).readline
    # detect_encoding() honours PEP 263 coding cookies and BOMs.
    encoding, _ = tokenize.detect_encoding(readline)
    newline_decoder = _io.IncrementalNewlineDecoder(None, True)
    return newline_decoder.decode(source_bytes.decode(encoding))
|
|
|
|
|
|
|
|
|
|
|
|
# Module specifications #######################################################

# Sentinel distinguishing "argument not supplied" from an explicit None for
# submodule_search_locations below.
_POPULATE = object()


def spec_from_file_location(name, location=None, *, loader=None,
                            submodule_search_locations=_POPULATE):
    """Return a module spec based on a file location.

    To indicate that the module is a package, set
    submodule_search_locations to a list of directory paths.  An
    empty list is sufficient, though its not otherwise useful to the
    import system.

    The loader must take a spec as its only __init__() arg.

    """
    if location is None:
        # The caller may simply want a partially populated location-
        # oriented spec.  So we set the location to a bogus value and
        # fill in as much as we can.
        location = '<unknown>'
        if hasattr(loader, 'get_filename'):
            # ExecutionLoader
            try:
                location = loader.get_filename(name)
            except ImportError:
                pass
    else:
        location = _os.fspath(location)
        try:
            location = _path_abspath(location)
        except OSError:
            # Best effort only; keep the location as given.
            pass

    # If the location is on the filesystem, but doesn't actually exist,
    # we could return None here, indicating that the location is not
    # valid.  However, we don't have a good way of testing since an
    # indirect location (e.g. a zip file or URL) will look like a
    # non-existent file relative to the filesystem.

    spec = _bootstrap.ModuleSpec(name, loader, origin=location)
    spec._set_fileattr = True

    # Pick a loader if one wasn't provided.
    if loader is None:
        for loader_class, suffixes in _get_supported_file_loaders():
            if location.endswith(tuple(suffixes)):
                loader = loader_class(name, location)
                spec.loader = loader
                break
        else:
            # No loader recognizes the suffix: not importable from here.
            return None

    # Set submodule_search_paths appropriately.
    if submodule_search_locations is _POPULATE:
        # Check the loader.
        if hasattr(loader, 'is_package'):
            try:
                is_package = loader.is_package(name)
            except ImportError:
                pass
            else:
                if is_package:
                    spec.submodule_search_locations = []
    else:
        spec.submodule_search_locations = submodule_search_locations
    if spec.submodule_search_locations == []:
        # A package with no explicit search path defaults to its own
        # containing directory.
        if location:
            dirname = _path_split(location)[0]
            spec.submodule_search_locations.append(dirname)

    return spec
|
|
|
|
|
|
|
|
|
gh-86298: Ensure that __loader__ and __spec__.loader agree in warnings.warn_explicit() (GH-97803)
In `_warnings.c`, in the C equivalent of `warnings.warn_explicit()`, if the module globals are given (and not None), the warning will attempt to get the source line for the issued warning. To do this, it needs the module's loader.
Previously, it would only look up `__loader__` in the module globals. In https://github.com/python/cpython/issues/86298 we want to defer to the `__spec__.loader` if available.
The first step on this journey is to check that `loader == __spec__.loader` and issue another warning if it is not. This commit does that.
Since this is a PoC, only manual testing for now.
```python
# /tmp/foo.py
import warnings
import bar
warnings.warn_explicit(
'warning!',
RuntimeWarning,
'bar.py', 2,
module='bar knee',
module_globals=bar.__dict__,
)
```
```python
# /tmp/bar.py
import sys
import os
import pathlib
# __loader__ = pathlib.Path()
```
Then running this: `./python.exe -Wdefault /tmp/foo.py`
Produces:
```
bar.py:2: RuntimeWarning: warning!
import os
```
Uncomment the `__loader__ = ` line in `bar.py` and try it again:
```
sys:1: ImportWarning: Module bar; __loader__ != __spec__.loader (<_frozen_importlib_external.SourceFileLoader object at 0x109f7dfa0> != PosixPath('.'))
bar.py:2: RuntimeWarning: warning!
import os
```
Automerge-Triggered-By: GH:warsaw
2022-10-07 10:32:53 +08:00
|
|
|
def _bless_my_loader(module_globals):
    """Helper function for _warnings.c

    Reconcile __loader__ and __spec__.loader from a module's globals,
    warning when they disagree.  See GH#97850 for details.

    """
    # 2022-10-06(warsaw): For now, this helper is only used in _warnings.c and
    # that use case only has the module globals.  This function could be
    # extended to accept either that or a module object.  However, in the
    # latter case, it would be better to raise certain exceptions when looking
    # at a module, which should have either a __loader__ or __spec__.loader.
    # For backward compatibility, it is possible that we'll get an empty
    # dictionary for the module globals, and that cannot raise an exception.
    if not isinstance(module_globals, dict):
        return None

    missing = object()
    loader = module_globals.get('__loader__', None)
    spec = module_globals.get('__spec__', missing)

    if loader is None:
        if spec is missing:
            # If working with a module:
            # raise AttributeError('Module globals is missing a __spec__')
            return None
        elif spec is None:
            raise ValueError('Module globals is missing a __spec__.loader')

    spec_loader = getattr(spec, 'loader', missing)

    if spec_loader in (missing, None):
        if loader is None:
            # Neither source provided a loader: error type depends on
            # whether the attribute was absent or explicitly None.
            exc = AttributeError if spec_loader is missing else ValueError
            raise exc('Module globals is missing a __spec__.loader')
        _warnings.warn(
            'Module globals is missing a __spec__.loader',
            DeprecationWarning)
        spec_loader = loader

    assert spec_loader is not None
    if loader is not None and loader != spec_loader:
        # Mismatch: warn, but honor the legacy __loader__ for now.
        _warnings.warn(
            'Module globals; __loader__ != __spec__.loader',
            DeprecationWarning)
        return loader

    return spec_loader
|
|
|
|
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
# Loaders #####################################################################
|
|
|
|
|
|
|
|
class WindowsRegistryFinder:

    """Meta path finder for modules declared in the Windows registry."""

    # Registry key templates; the Debug variant is used for debug builds.
    REGISTRY_KEY = (
        'Software\\Python\\PythonCore\\{sys_version}'
        '\\Modules\\{fullname}')
    REGISTRY_KEY_DEBUG = (
        'Software\\Python\\PythonCore\\{sys_version}'
        '\\Modules\\{fullname}\\Debug')
    DEBUG_BUILD = (_MS_WINDOWS and '_d.pyd' in EXTENSION_SUFFIXES)

    @staticmethod
    def _open_registry(key):
        # Prefer the per-user hive, falling back to the machine-wide one.
        try:
            return winreg.OpenKey(winreg.HKEY_CURRENT_USER, key)
        except OSError:
            return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key)

    @classmethod
    def _search_registry(cls, fullname):
        """Return the registered filepath for *fullname*, or None."""
        if cls.DEBUG_BUILD:
            registry_key = cls.REGISTRY_KEY_DEBUG
        else:
            registry_key = cls.REGISTRY_KEY
        key = registry_key.format(fullname=fullname,
                                  sys_version='%d.%d' % sys.version_info[:2])
        try:
            with cls._open_registry(key) as hkey:
                filepath = winreg.QueryValue(hkey, '')
        except OSError:
            return None
        return filepath

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):
        filepath = cls._search_registry(fullname)
        if filepath is None:
            return None
        # Verify the registered file actually exists.
        try:
            _path_stat(filepath)
        except OSError:
            return None
        for loader, suffixes in _get_supported_file_loaders():
            if filepath.endswith(tuple(suffixes)):
                spec = _bootstrap.spec_from_loader(fullname,
                                                   loader(fullname, filepath),
                                                   origin=filepath)
                return spec
|
|
|
|
|
|
|
|
|
|
|
|
class _LoaderBasics:

    """Base class of common code needed by both SourceLoader and
    SourcelessFileLoader."""

    def is_package(self, fullname):
        """Concrete implementation of InspectLoader.is_package by checking if
        the path returned by get_filename has a filename of '__init__.py'."""
        filename = _path_split(self.get_filename(fullname))[1]
        filename_base = filename.rsplit('.', 1)[0]
        tail_name = fullname.rpartition('.')[2]
        # A module literally named "__init__" is not itself a package.
        return filename_base == '__init__' and tail_name != '__init__'

    def create_module(self, spec):
        """Use default semantics for module creation."""

    def exec_module(self, module):
        """Execute the module."""
        code = self.get_code(module.__name__)
        if code is None:
            raise ImportError(f'cannot load module {module.__name__!r} when '
                              'get_code() returns None')
        # Run the module body while hiding importlib frames from tracebacks.
        _bootstrap._call_with_frames_removed(exec, code, module.__dict__)

    def load_module(self, fullname):
        """This method is deprecated."""
        # Warning implemented in _load_module_shim().
        return _bootstrap._load_module_shim(self, fullname)
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
class SourceLoader(_LoaderBasics):

    def path_mtime(self, path):
        """Optional method that returns the modification time (an int) for the
        specified path (a str).

        Raises OSError when the path cannot be handled.
        """
        # Default: not implemented; signalled via OSError per the contract.
        raise OSError

    def path_stats(self, path):
        """Optional method returning a metadata dict for the specified
        path (a str).

        Possible keys:
        - 'mtime' (mandatory) is the numeric timestamp of last source
          code modification;
        - 'size' (optional) is the size in bytes of the source code.

        Implementing this method allows the loader to read bytecode files.
        Raises OSError when the path cannot be handled.
        """
        return {'mtime': self.path_mtime(path)}

    def _cache_bytecode(self, source_path, cache_path, data):
        """Optional method which writes data (bytes) to a file path (a str).

        Implementing this method allows for the writing of bytecode files.

        The source path is needed in order to correctly transfer permissions
        """
        # For backwards compatibility, we delegate to set_data()
        return self.set_data(cache_path, data)

    def set_data(self, path, data):
        """Optional method which writes data (bytes) to a file path (a str).

        Implementing this method allows for the writing of bytecode files.
        """
        # Intentionally a no-op by default; subclasses may override.

    def get_source(self, fullname):
        """Concrete implementation of InspectLoader.get_source."""
        path = self.get_filename(fullname)
        try:
            source_bytes = self.get_data(path)
        except OSError as exc:
            raise ImportError('source not available through get_data()',
                              name=fullname) from exc
        return decode_source(source_bytes)

    def source_to_code(self, data, path, *, _optimize=-1):
        """Return the code object compiled from source.

        The 'data' argument can be any object type that compile() supports.
        """
        # Compile while hiding importlib frames from any SyntaxError traceback.
        return _bootstrap._call_with_frames_removed(compile, data, path, 'exec',
                                                    dont_inherit=True,
                                                    optimize=_optimize)

    def get_code(self, fullname):
        """Concrete implementation of InspectLoader.get_code.

        Reading of bytecode requires path_stats to be implemented. To write
        bytecode, set_data must also be implemented.

        """
        source_path = self.get_filename(fullname)
        source_mtime = None
        source_bytes = None
        source_hash = None
        hash_based = False
        check_source = True
        try:
            bytecode_path = cache_from_source(source_path)
        except NotImplementedError:
            # No bytecode cache location available; compile from source only.
            bytecode_path = None
        else:
            try:
                st = self.path_stats(source_path)
            except OSError:
                # Can't stat the source, so cached bytecode can't be validated.
                pass
            else:
                source_mtime = int(st['mtime'])
                try:
                    data = self.get_data(bytecode_path)
                except OSError:
                    # No cached bytecode; fall through to compiling the source.
                    pass
                else:
                    exc_details = {
                        'name': fullname,
                        'path': bytecode_path,
                    }
                    try:
                        # Validate the 16-byte pyc header; flag bit 0 selects
                        # hash-based pycs, bit 1 whether to check the source.
                        flags = _classify_pyc(data, fullname, exc_details)
                        bytes_data = memoryview(data)[16:]
                        hash_based = flags & 0b1 != 0
                        if hash_based:
                            check_source = flags & 0b10 != 0
                            if (_imp.check_hash_based_pycs != 'never' and
                                (check_source or
                                 _imp.check_hash_based_pycs == 'always')):
                                source_bytes = self.get_data(source_path)
                                source_hash = _imp.source_hash(
                                    _imp.pyc_magic_number_token,
                                    source_bytes,
                                )
                                _validate_hash_pyc(data, source_hash, fullname,
                                                   exc_details)
                        else:
                            _validate_timestamp_pyc(
                                data,
                                source_mtime,
                                st['size'],
                                fullname,
                                exc_details,
                            )
                    except (ImportError, EOFError):
                        # Stale or corrupt bytecode; recompile from source.
                        pass
                    else:
                        _bootstrap._verbose_message('{} matches {}',
                                                    bytecode_path, source_path)
                        return _compile_bytecode(bytes_data, name=fullname,
                                                 bytecode_path=bytecode_path,
                                                 source_path=source_path)
        # Bytecode was missing or invalid: compile from source.
        if source_bytes is None:
            source_bytes = self.get_data(source_path)
        code_object = self.source_to_code(source_bytes, source_path)
        _bootstrap._verbose_message('code object from {}', source_path)
        # Optionally write fresh bytecode back to the cache (best effort).
        if (not sys.dont_write_bytecode and bytecode_path is not None and
                source_mtime is not None):
            if hash_based:
                if source_hash is None:
                    source_hash = _imp.source_hash(_imp.pyc_magic_number_token,
                                                   source_bytes)
                data = _code_to_hash_pyc(code_object, source_hash, check_source)
            else:
                data = _code_to_timestamp_pyc(code_object, source_mtime,
                                              len(source_bytes))
            try:
                self._cache_bytecode(source_path, bytecode_path, data)
            except NotImplementedError:
                pass
        return code_object
|
|
|
|
|
|
|
|
|
|
|
|
class FileLoader:

    """Base file loader class which implements the loader protocol methods that
    require file system usage."""

    def __init__(self, fullname, path):
        """Cache the module name and the path to the file found by the
        finder."""
        self.name = fullname
        self.path = path

    def __eq__(self, other):
        # Loaders compare equal when class and full state match.
        return (self.__class__ == other.__class__ and
                self.__dict__ == other.__dict__)

    def __hash__(self):
        return hash(self.name) ^ hash(self.path)

    @_check_name
    def load_module(self, fullname):
        """Load a module from a file.

        This method is deprecated.  Use exec_module() instead.

        """
        # The only reason for this method is for the name check.
        # Issue #14857: Avoid the zero-argument form of super so the
        # implementation of that form can be updated without breaking the
        # frozen module.
        return super(FileLoader, self).load_module(fullname)

    @_check_name
    def get_filename(self, fullname):
        """Return the path to the source file as found by the finder."""
        return self.path

    def get_data(self, path):
        """Return the data from path as raw bytes."""
        # Source and extension files go through open_code() so audit hooks
        # see code being opened; other data uses plain binary I/O.
        if isinstance(self, (SourceLoader, ExtensionFileLoader)):
            with _io.open_code(str(path)) as file:
                return file.read()
        else:
            with _io.FileIO(path, 'r') as file:
                return file.read()

    @_check_name
    def get_resource_reader(self, module):
        # Imported lazily: importlib.readers is not available during bootstrap.
        from importlib.readers import FileReader
        return FileReader(self)
|
2018-01-16 07:07:11 +08:00
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
class SourceFileLoader(FileLoader, SourceLoader):

    """Concrete implementation of SourceLoader using the file system."""

    def path_stats(self, path):
        """Return the metadata for the path."""
        st = _path_stat(path)
        return {'mtime': st.st_mtime, 'size': st.st_size}

    def _cache_bytecode(self, source_path, bytecode_path, data):
        # Adapt between the two APIs: mirror the source file's permissions
        # onto the bytecode file.
        mode = _calc_mode(source_path)
        return self.set_data(bytecode_path, data, _mode=mode)

    def set_data(self, path, data, *, _mode=0o666):
        """Write bytes data to a file."""
        parent, filename = _path_split(path)
        path_parts = []
        # Figure out what directories are missing.
        while parent and not _path_isdir(parent):
            parent, part = _path_split(parent)
            path_parts.append(part)
        # Create needed directories.
        for part in reversed(path_parts):
            parent = _path_join(parent, part)
            try:
                _os.mkdir(parent)
            except FileExistsError:
                # Probably another Python process already created the dir.
                continue
            except OSError as exc:
                # Could be a permission error, read-only filesystem: just
                # forget about writing the data.
                _bootstrap._verbose_message('could not create {!r}: {!r}',
                                            parent, exc)
                return
        try:
            # Atomic write so readers never see a partially written pyc.
            _write_atomic(path, data, _mode)
            _bootstrap._verbose_message('created {!r}', path)
        except OSError as exc:
            # Same as above: just don't write the bytecode.
            _bootstrap._verbose_message('could not create {!r}: {!r}', path,
                                        exc)
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
|
|
|
class SourcelessFileLoader(FileLoader, _LoaderBasics):

    """Loader which handles sourceless file imports."""

    def get_code(self, fullname):
        """Return the code object loaded from the bytecode file."""
        path = self.get_filename(fullname)
        data = self.get_data(path)
        # Call _classify_pyc to do basic validation of the pyc but ignore the
        # result. There's no source to check against.
        exc_details = {
            'name': fullname,
            'path': path,
        }
        _classify_pyc(data, fullname, exc_details)
        # The code object follows the 16-byte pyc header.
        return _compile_bytecode(
            memoryview(data)[16:],
            name=fullname,
            bytecode_path=path,
        )

    def get_source(self, fullname):
        """Return None as there is no source code."""
        return None
|
|
|
|
|
|
|
|
|
2015-05-23 20:24:10 +08:00
|
|
|
class ExtensionFileLoader(FileLoader, _LoaderBasics):

    """Loader for extension modules.

    The constructor is designed to work with FileFinder.

    """

    def __init__(self, name, path):
        # Module name and path to the extension binary.
        self.name = name
        self.path = path

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self.__dict__ == other.__dict__)

    def __hash__(self):
        return hash(self.name) ^ hash(self.path)

    def create_module(self, spec):
        """Create an uninitialized extension module"""
        module = _bootstrap._call_with_frames_removed(
            _imp.create_dynamic, spec)
        _bootstrap._verbose_message('extension module {!r} loaded from {!r}',
                                    spec.name, self.path)
        return module

    def exec_module(self, module):
        """Initialize an extension module"""
        _bootstrap._call_with_frames_removed(_imp.exec_dynamic, module)
        _bootstrap._verbose_message('extension module {!r} executed from {!r}',
                                    self.name, self.path)

    def is_package(self, fullname):
        """Return True if the extension module is a package."""
        file_name = _path_split(self.path)[1]
        # A package's extension init file is named '__init__' + suffix.
        return any(file_name == '__init__' + suffix
                   for suffix in EXTENSION_SUFFIXES)

    def get_code(self, fullname):
        """Return None as an extension module cannot create a code object."""
        return None

    def get_source(self, fullname):
        """Return None as extension modules have no source code."""
        return None

    @_check_name
    def get_filename(self, fullname):
        """Return the path to the source file as found by the finder."""
        return self.path
|
|
|
|
|
|
|
|
|
|
|
|
class _NamespacePath:
    """Represents a namespace package's path.  It uses the module name
    to find its parent module, and from there it looks up the parent's
    __path__.  When this changes, the module's own path is recomputed,
    using path_finder.  For top-level modules, the parent module's path
    is sys.path."""

    # When invalidate_caches() is called, this epoch is incremented
    # https://bugs.python.org/issue45703
    _epoch = 0

    def __init__(self, name, path, path_finder):
        self._name = name
        self._path = path
        # Snapshot of the parent path and epoch used to detect staleness.
        self._last_parent_path = tuple(self._get_parent_path())
        self._last_epoch = self._epoch
        self._path_finder = path_finder

    def _find_parent_path_names(self):
        """Returns a tuple of (parent-module-name, parent-path-attr-name)"""
        parent, dot, me = self._name.rpartition('.')
        if dot == '':
            # This is a top-level module. sys.path contains the parent path.
            return 'sys', 'path'
        # Not a top-level module. parent-module.__path__ contains the
        # parent path.
        return parent, '__path__'

    def _get_parent_path(self):
        parent_module_name, path_attr_name = self._find_parent_path_names()
        return getattr(sys.modules[parent_module_name], path_attr_name)

    def _recalculate(self):
        # If the parent's path has changed, recalculate _path
        parent_path = tuple(self._get_parent_path())  # Make a copy
        if parent_path != self._last_parent_path or self._epoch != self._last_epoch:
            spec = self._path_finder(self._name, parent_path)
            # Note that no changes are made if a loader is returned, but we
            # do remember the new parent path
            if spec is not None and spec.loader is None:
                if spec.submodule_search_locations:
                    self._path = spec.submodule_search_locations
            self._last_parent_path = parent_path     # Save the copy
            self._last_epoch = self._epoch
        return self._path

    def __iter__(self):
        return iter(self._recalculate())

    def __getitem__(self, index):
        return self._recalculate()[index]

    def __setitem__(self, index, path):
        self._path[index] = path

    def __len__(self):
        return len(self._recalculate())

    def __repr__(self):
        return f'_NamespacePath({self._path!r})'

    def __contains__(self, item):
        return item in self._recalculate()

    def append(self, item):
        self._path.append(item)
|
|
|
|
|
|
|
|
|
2021-10-21 05:05:29 +08:00
|
|
|
# This class is actually exposed publicly in a namespace package's __loader__
# attribute, so it should be available through a non-private name.
# https://github.com/python/cpython/issues/92054
class NamespaceLoader:
    def __init__(self, name, path, path_finder):
        # _NamespacePath tracks the (possibly changing) portions of the
        # namespace package's __path__.
        self._path = _NamespacePath(name, path, path_finder)

    def is_package(self, fullname):
        # A namespace module is always a package.
        return True

    def get_source(self, fullname):
        # Namespace packages have no source; return the empty string.
        return ''

    def get_code(self, fullname):
        # An empty code object: executing a namespace package does nothing.
        return compile('', '<string>', 'exec', dont_inherit=True)

    def create_module(self, spec):
        """Use default semantics for module creation."""

    def exec_module(self, module):
        pass

    def load_module(self, fullname):
        """Load a namespace module.

        This method is deprecated.  Use exec_module() instead.

        """
        # The import system never calls this method.
        _bootstrap._verbose_message('namespace module loaded with path {!r}',
                                    self._path)
        # Warning implemented in _load_module_shim().
        return _bootstrap._load_module_shim(self, fullname)

    def get_resource_reader(self, module):
        # Imported lazily: importlib.readers is not available during bootstrap.
        from importlib.readers import NamespaceReader
        return NamespaceReader(self._path)


# We use this exclusively in module_from_spec() for backward-compatibility.
_NamespaceLoader = NamespaceLoader
|
|
|
|
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
# Finders #####################################################################
|
|
|
|
|
|
|
|
class PathFinder:

    """Meta path finder for sys.path and package __path__ attributes."""

    @staticmethod
    def invalidate_caches():
        """Call the invalidate_caches() method on all path entry finders
        stored in sys.path_importer_cache (where implemented)."""
        for name, finder in list(sys.path_importer_cache.items()):
            # Drop entry if finder name is a relative path. The current
            # working directory may have changed.
            if finder is None or not _path_isabs(name):
                del sys.path_importer_cache[name]
            elif hasattr(finder, 'invalidate_caches'):
                finder.invalidate_caches()
        # Also invalidate the caches of _NamespacePaths
        # https://bugs.python.org/issue45703
        _NamespacePath._epoch += 1

        # Imported lazily: importlib.metadata is unavailable during bootstrap.
        from importlib.metadata import MetadataPathFinder
        MetadataPathFinder.invalidate_caches()

    @staticmethod
    def _path_hooks(path):
        """Search sys.path_hooks for a finder for 'path'."""
        if sys.path_hooks is not None and not sys.path_hooks:
            _warnings.warn('sys.path_hooks is empty', ImportWarning)
        for hook in sys.path_hooks:
            try:
                return hook(path)
            except ImportError:
                # This hook does not handle the path; try the next one.
                continue
        else:
            return None

    @classmethod
    def _path_importer_cache(cls, path):
        """Get the finder for the path entry from sys.path_importer_cache.

        If the path entry is not in the cache, find the appropriate finder
        and cache it. If no finder is available, store None.

        """
        if path == '':
            try:
                path = _os.getcwd()
            except FileNotFoundError:
                # Don't cache the failure as the cwd can easily change to
                # a valid directory later on.
                return None
        try:
            finder = sys.path_importer_cache[path]
        except KeyError:
            finder = cls._path_hooks(path)
            sys.path_importer_cache[path] = finder
        return finder

    @classmethod
    def _get_spec(cls, fullname, path, target=None):
        """Find the loader or namespace_path for this module/package name."""
        # If this ends up being a namespace package, namespace_path is
        # the list of paths that will become its __path__
        namespace_path = []
        for entry in path:
            if not isinstance(entry, str):
                continue
            finder = cls._path_importer_cache(entry)
            if finder is not None:
                spec = finder.find_spec(fullname, target)
                if spec is None:
                    continue
                if spec.loader is not None:
                    return spec
                portions = spec.submodule_search_locations
                if portions is None:
                    raise ImportError('spec missing loader')
                # This is possibly part of a namespace package.
                # Remember these path entries (if any) for when we
                # create a namespace package, and continue iterating
                # on path.
                namespace_path.extend(portions)
        else:
            # No loader found anywhere: return a loaderless spec carrying
            # the accumulated namespace portions (possibly empty).
            spec = _bootstrap.ModuleSpec(fullname, None)
            spec.submodule_search_locations = namespace_path
            return spec

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):
        """Try to find a spec for 'fullname' on sys.path or 'path'.

        The search is based on sys.path_hooks and sys.path_importer_cache.
        """
        if path is None:
            path = sys.path
        spec = cls._get_spec(fullname, path, target)
        if spec is None:
            return None
        elif spec.loader is None:
            namespace_path = spec.submodule_search_locations
            if namespace_path:
                # We found at least one namespace path.  Return a spec which
                # can create the namespace package.
                spec.origin = None
                spec.submodule_search_locations = _NamespacePath(fullname, namespace_path, cls._get_spec)
                return spec
            else:
                return None
        else:
            return spec

    @staticmethod
    def find_distributions(*args, **kwargs):
        """
        Find distributions.

        Return an iterable of all Distribution instances capable of
        loading the metadata for packages matching ``context.name``
        (or all names if ``None`` indicated) along the paths in the list
        of directories ``context.path``.
        """
        # Imported lazily: importlib.metadata is unavailable during bootstrap.
        from importlib.metadata import MetadataPathFinder
        return MetadataPathFinder.find_distributions(*args, **kwargs)
|
2019-05-25 07:59:01 +08:00
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
class FileFinder:

    """File-based finder.

    Interactions with the file system are cached for performance, being
    refreshed when the directory the finder is handling has been modified.

    """

    def __init__(self, path, *loader_details):
        """Initialize with the path to search on and a variable number of
        2-tuples containing the loader and the file suffixes the loader
        recognizes."""
        loaders = []
        for loader, suffixes in loader_details:
            loaders.extend((suffix, loader) for suffix in suffixes)
        self._loaders = loaders
        # Base (directory) path
        if not path or path == '.':
            self.path = _os.getcwd()
        else:
            self.path = _path_abspath(path)
        # Sentinel mtime that forces a cache fill on first find_spec().
        self._path_mtime = -1
        self._path_cache = set()
        self._relaxed_path_cache = set()

    def invalidate_caches(self):
        """Invalidate the directory mtime."""
        self._path_mtime = -1

    def _get_spec(self, loader_class, fullname, path, smsl, target):
        # Helper: instantiate the loader and build a file-based spec.
        loader = loader_class(fullname, path)
        return spec_from_file_location(fullname, path, loader=loader,
                                       submodule_search_locations=smsl)

    def find_spec(self, fullname, target=None):
        """Try to find a spec for the specified module.

        Returns the matching spec, or None if not found.
        """
        is_namespace = False
        tail_module = fullname.rpartition('.')[2]
        try:
            mtime = _path_stat(self.path or _os.getcwd()).st_mtime
        except OSError:
            mtime = -1
        # Refresh the directory listing cache if the directory changed.
        if mtime != self._path_mtime:
            self._fill_cache()
            self._path_mtime = mtime
        # tail_module keeps the original casing, for __file__ and friends
        if _relax_case():
            cache = self._relaxed_path_cache
            cache_module = tail_module.lower()
        else:
            cache = self._path_cache
            cache_module = tail_module
        # Check if the module is the name of a directory (and thus a package).
        if cache_module in cache:
            base_path = _path_join(self.path, tail_module)
            for suffix, loader_class in self._loaders:
                init_filename = '__init__' + suffix
                full_path = _path_join(base_path, init_filename)
                if _path_isfile(full_path):
                    return self._get_spec(loader_class, fullname, full_path, [base_path], target)
            else:
                # If a namespace package, return the path if we don't
                # find a module in the next section.
                is_namespace = _path_isdir(base_path)
        # Check for a file w/ a proper suffix exists.
        for suffix, loader_class in self._loaders:
            try:
                full_path = _path_join(self.path, tail_module + suffix)
            except ValueError:
                return None
            _bootstrap._verbose_message('trying {}', full_path, verbosity=2)
            if cache_module + suffix in cache:
                if _path_isfile(full_path):
                    return self._get_spec(loader_class, fullname, full_path,
                                          None, target)
        if is_namespace:
            _bootstrap._verbose_message('possible namespace for {}', base_path)
            spec = _bootstrap.ModuleSpec(fullname, None)
            spec.submodule_search_locations = [base_path]
            return spec
        return None

    def _fill_cache(self):
        """Fill the cache of potential modules and packages for this directory."""
        path = self.path
        try:
            contents = _os.listdir(path or _os.getcwd())
        except (FileNotFoundError, PermissionError, NotADirectoryError):
            # Directory has either been removed, turned into a file, or made
            # unreadable.
            contents = []
        # We store two cached versions, to handle runtime changes of the
        # PYTHONCASEOK environment variable.
        if not sys.platform.startswith('win'):
            self._path_cache = set(contents)
        else:
            # Windows users can import modules with case-insensitive file
            # suffixes (for legacy reasons). Make the suffix lowercase here
            # so it's done once instead of for every import. This is safe as
            # the specified suffixes to check against are always specified in a
            # case-sensitive manner.
            lower_suffix_contents = set()
            for item in contents:
                name, dot, suffix = item.partition('.')
                if dot:
                    new_name = f'{name}.{suffix.lower()}'
                else:
                    new_name = name
                lower_suffix_contents.add(new_name)
            self._path_cache = lower_suffix_contents
        if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
            self._relaxed_path_cache = {fn.lower() for fn in contents}

    @classmethod
    def path_hook(cls, *loader_details):
        """A class method which returns a closure to use on sys.path_hook
        which will return an instance using the specified loaders and the path
        called on the closure.

        If the path called on the closure is not a directory, ImportError is
        raised.

        """
        def path_hook_for_FileFinder(path):
            """Path hook for importlib.machinery.FileFinder."""
            if not _path_isdir(path):
                raise ImportError('only directories are supported', path=path)
            return cls(path, *loader_details)

        return path_hook_for_FileFinder

    def __repr__(self):
        return f'FileFinder({self.path!r})'
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
2024-03-19 20:36:19 +08:00
|
|
|
class AppleFrameworkLoader(ExtensionFileLoader):
    """A loader for modules that have been packaged as frameworks for
    compatibility with Apple's iOS App Store policies.
    """
    def create_module(self, spec):
        """Create the extension module, resolving .fwork indirection.

        A .fwork file contains the relative path (within the app bundle)
        of the actual framework binary.
        """
        # If the ModuleSpec has been created by the FileFinder, it will have
        # been created with an origin pointing to the .fwork file. We need to
        # redirect this to the location in the Frameworks folder, using the
        # content of the .fwork file.
        if spec.origin.endswith(".fwork"):
            with _io.FileIO(spec.origin, 'r') as file:
                framework_binary = file.read().decode().strip()
            bundle_path = _path_split(sys.executable)[0]
            spec.origin = _path_join(bundle_path, framework_binary)

        # If the loader is created based on the spec for a loaded module, the
        # path will be pointing at the Framework location. If this occurs,
        # get the original .fwork location to use as the module's __file__.
        if self.path.endswith(".fwork"):
            path = self.path
        else:
            with _io.FileIO(self.path + ".origin", 'r') as file:
                origin = file.read().decode().strip()
            bundle_path = _path_split(sys.executable)[0]
            path = _path_join(bundle_path, origin)

        module = _bootstrap._call_with_frames_removed(_imp.create_dynamic, spec)

        _bootstrap._verbose_message(
            "Apple framework extension module {!r} loaded from {!r} (path {!r})",
            spec.name,
            spec.origin,
            path,
        )

        # Ensure that the __file__ points at the .fwork location
        module.__file__ = path

        return module
|
|
|
|
|
2015-05-03 09:15:18 +08:00
|
|
|
# Import setup ###############################################################
|
|
|
|
|
|
|
|
def _fix_up_module(ns, name, pathname, cpathname=None):
|
|
|
|
# This function is used by PyImport_ExecCodeModuleObject().
|
|
|
|
loader = ns.get('__loader__')
|
|
|
|
spec = ns.get('__spec__')
|
|
|
|
if not loader:
|
|
|
|
if spec:
|
|
|
|
loader = spec.loader
|
|
|
|
elif pathname == cpathname:
|
|
|
|
loader = SourcelessFileLoader(name, pathname)
|
|
|
|
else:
|
|
|
|
loader = SourceFileLoader(name, pathname)
|
|
|
|
if not spec:
|
|
|
|
spec = spec_from_file_location(name, pathname, loader=loader)
|
2022-10-07 06:40:22 +08:00
|
|
|
if cpathname:
|
|
|
|
spec.cached = _path_abspath(cpathname)
|
2015-05-03 09:15:18 +08:00
|
|
|
try:
|
|
|
|
ns['__spec__'] = spec
|
|
|
|
ns['__loader__'] = loader
|
|
|
|
ns['__file__'] = pathname
|
|
|
|
ns['__cached__'] = cpathname
|
|
|
|
except Exception:
|
|
|
|
# Not important enough to report.
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
|
|
def _get_supported_file_loaders():
    """Returns a list of file-based module loaders.

    Each item is a tuple (loader, suffixes).
    """
    loaders = []
    if hasattr(_imp, 'create_dynamic'):
        ext_suffixes = _imp.extension_suffixes()
        # Apple platforms additionally accept framework-packaged extensions,
        # found via .fwork placeholders instead of .so files.
        if sys.platform in {"ios", "tvos", "watchos"}:
            fwork_suffixes = [suffix.replace(".so", ".fwork")
                              for suffix in ext_suffixes]
            loaders.append((AppleFrameworkLoader, fwork_suffixes))
        loaders.append((ExtensionFileLoader, ext_suffixes))
    loaders.append((SourceFileLoader, SOURCE_SUFFIXES))
    loaders.append((SourcelessFileLoader, BYTECODE_SUFFIXES))
    return loaders
|
2015-05-03 09:15:18 +08:00
|
|
|
|
|
|
|
|
2020-11-19 20:43:43 +08:00
|
|
|
def _set_bootstrap_module(_bootstrap_module):
|
|
|
|
global _bootstrap
|
2015-05-03 09:15:18 +08:00
|
|
|
_bootstrap = _bootstrap_module
|
|
|
|
|
|
|
|
|
|
|
|
def _install(_bootstrap_module):
    """Install the path-based import components."""
    _set_bootstrap_module(_bootstrap_module)
    loaders = _get_supported_file_loaders()
    # One path hook handles all file-based loaders; PathFinder drives it.
    sys.path_hooks.append(FileFinder.path_hook(*loaders))
    sys.meta_path.append(PathFinder)
|