Before sending to remote
0    env/lib/python3.8/site-packages/pip/_internal/operations/__init__.py  (vendored, new file)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc  (vendored, new file; binary file not shown)
0    env/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py  (vendored, new file)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc  (vendored, new file; binary file not shown)
40   env/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py  (vendored, new file)
@@ -0,0 +1,40 @@
"""Metadata generation logic for source distributions.
"""

import logging
import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.build_env import BuildEnvironment
    from pip._vendor.pep517.wrappers import Pep517HookCaller

logger = logging.getLogger(__name__)


def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)
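
For reference, the PEP 517 metadata hook that generate_metadata() wraps can be driven directly through the vendored pep517 library. A minimal sketch, not part of this commit; the project path and backend name are illustrative assumptions:

import os
import tempfile

from pip._vendor.pep517.wrappers import Pep517HookCaller

# Hypothetical project whose pyproject.toml declares a setuptools backend.
source_dir = "/path/to/project"
backend = Pep517HookCaller(source_dir, "setuptools.build_meta")

metadata_dir = tempfile.mkdtemp(prefix="modern-metadata-")
# The hook returns the basename of the generated .dist-info directory.
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
print(os.path.join(metadata_dir, distinfo_dir))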
122  env/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py  (vendored, new file)
@@ -0,0 +1,122 @@
"""Metadata generation logic for legacy source distributions.
"""

import logging
import os

from pip._internal.exceptions import InstallationError
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import List, Optional

    from pip._internal.build_env import BuildEnvironment

logger = logging.getLogger(__name__)


def _find_egg_info(source_directory, is_editable):
    # type: (str, bool) -> str
    """Find an .egg-info in `source_directory`, based on `is_editable`.
    """

    def looks_like_virtual_env(path):
        # type: (str) -> bool
        return (
            os.path.lexists(os.path.join(path, 'bin', 'python')) or
            os.path.exists(os.path.join(path, 'Scripts', 'Python.exe'))
        )

    def locate_editable_egg_info(base):
        # type: (str) -> List[str]
        candidates = []  # type: List[str]
        for root, dirs, files in os.walk(base):
            for dir_ in vcs.dirnames:
                if dir_ in dirs:
                    dirs.remove(dir_)
            # Iterate over a copy of ``dirs``, since mutating
            # a list while iterating over it can cause trouble.
            # (See https://github.com/pypa/pip/pull/462.)
            for dir_ in list(dirs):
                if looks_like_virtual_env(os.path.join(root, dir_)):
                    dirs.remove(dir_)
                # Also don't search through tests
                elif dir_ == 'test' or dir_ == 'tests':
                    dirs.remove(dir_)
            candidates.extend(os.path.join(root, dir_) for dir_ in dirs)
        return [f for f in candidates if f.endswith('.egg-info')]

    def depth_of_directory(dir_):
        # type: (str) -> int
        return (
            dir_.count(os.path.sep) +
            (os.path.altsep and dir_.count(os.path.altsep) or 0)
        )

    base = source_directory
    if is_editable:
        filenames = locate_editable_egg_info(base)
    else:
        base = os.path.join(base, 'pip-egg-info')
        filenames = os.listdir(base)

    if not filenames:
        raise InstallationError(
            "Files/directories not found in {}".format(base)
        )

    # If we have more than one match, we pick the toplevel one. This
    # can easily be the case if there is a dist folder which contains
    # an extracted tarball for testing purposes.
    if len(filenames) > 1:
        filenames.sort(key=depth_of_directory)

    return os.path.join(base, filenames[0])


def generate_metadata(
    build_env,  # type: BuildEnvironment
    setup_py_path,  # type: str
    source_dir,  # type: str
    editable,  # type: bool
    isolated,  # type: bool
    details,  # type: str
):
    # type: (...) -> str
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        'Running setup.py (path:%s) egg_info for package %s',
        setup_py_path, details,
    )

    egg_info_dir = None  # type: Optional[str]
    # For non-editable installs, don't put the .egg-info files at the root,
    # to avoid confusion due to the source code being considered an installed
    # egg.
    if not editable:
        egg_info_dir = os.path.join(source_dir, 'pip-egg-info')
        # setuptools complains if the target directory does not exist.
        ensure_dir(egg_info_dir)

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        call_subprocess(
            args,
            cwd=source_dir,
            command_desc='python setup.py egg_info',
        )

    # Return the .egg-info directory.
    return _find_egg_info(source_dir, editable)
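
The legacy path above ultimately shells out to setup.py. A rough standalone equivalent of the egg_info step, with hypothetical paths (a sketch, not part of the commit):

import os
import subprocess
import sys

source_dir = "/path/to/legacy-project"  # hypothetical sdist checkout
egg_info_dir = os.path.join(source_dir, "pip-egg-info")
os.makedirs(egg_info_dir, exist_ok=True)  # setuptools wants it to exist

subprocess.check_call(
    [sys.executable, "setup.py", "egg_info", "--egg-base", egg_info_dir],
    cwd=source_dir,
)
# Metadata lands in pip-egg-info/<name>.egg-info/, which _find_egg_info locates.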
46   env/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py  (vendored, new file)
@@ -0,0 +1,46 @@
import logging
import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional
    from pip._vendor.pep517.wrappers import Pep517HookCaller

logger = logging.getLogger(__name__)


def build_wheel_pep517(
    name,  # type: str
    backend,  # type: Pep517HookCaller
    metadata_directory,  # type: str
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    if build_options:
        # PEP 517 does not support --build-options
        logger.error('Cannot build wheel for %s using PEP 517 when '
                     '--build-option is present' % (name,))
        return None
    try:
        logger.debug('Destination directory: %s', tempd)

        runner = runner_with_spinner_message(
            'Building wheel for {} (PEP 517)'.format(name)
        )
        with backend.subprocess_runner(runner):
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        logger.error('Failed building wheel for %s', name)
        return None
    return os.path.join(tempd, wheel_name)
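
build_wheel_pep517() delegates the actual build to the backend's build_wheel hook. Calling that hook directly with the vendored pep517 library looks roughly like this (illustrative sketch; the path and backend name are assumptions):

import tempfile

from pip._vendor.pep517.wrappers import Pep517HookCaller

backend = Pep517HookCaller("/path/to/project", "setuptools.build_meta")
dist_dir = tempfile.mkdtemp(prefix="pep517-wheel-")
# build_wheel returns the basename of the wheel it wrote into dist_dir.
wheel_name = backend.build_wheel(dist_dir)
print(dist_dir, wheel_name)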
115  env/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py  (vendored, new file)
@@ -0,0 +1,115 @@
import logging
import os.path

from pip._internal.utils.setuptools_build import (
    make_setuptools_bdist_wheel_args,
)
from pip._internal.utils.subprocess import (
    LOG_DIVIDER,
    call_subprocess,
    format_command_args,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Text

logger = logging.getLogger(__name__)


def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> str
    """Format command information for logging."""
    command_desc = format_command_args(command_args)
    text = 'Command arguments: {}\n'.format(command_desc)

    if not command_output:
        text += 'Command output: None'
    elif logger.getEffectiveLevel() > logging.DEBUG:
        text += 'Command output: [use --verbose to show]'
    else:
        if not command_output.endswith('\n'):
            command_output += '\n'
        text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)

    return text


def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    name,  # type: str
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> Optional[str]
    """Return the path to the wheel in the temporary build directory."""
    # Sort for determinism.
    names = sorted(names)
    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])


def build_wheel_legacy(
    name,  # type: str
    setup_py_path,  # type: str
    source_dir,  # type: str
    global_options,  # type: List[str]
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    wheel_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    spin_message = 'Building wheel for %s (setup.py)' % (name,)
    with open_spinner(spin_message) as spinner:
        logger.debug('Destination directory: %s', tempd)

        try:
            output = call_subprocess(
                wheel_args,
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error('Failed building wheel for %s', name)
            return None

        names = os.listdir(tempd)
        wheel_path = get_legacy_build_wheel_path(
            names=names,
            temp_dir=tempd,
            name=name,
            command_args=wheel_args,
            command_output=output,
        )
        return wheel_path
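
The legacy build reduces to running setup.py bdist_wheel with the temporary directory as the destination; get_legacy_build_wheel_path() then picks the single resulting file. A standalone sketch with hypothetical paths, not part of the commit:

import os
import subprocess
import sys
import tempfile

source_dir = "/path/to/legacy-project"  # hypothetical
tempd = tempfile.mkdtemp(prefix="pip-wheel-")

subprocess.check_call(
    [sys.executable, "setup.py", "bdist_wheel", "-d", tempd],
    cwd=source_dir,
)
print(sorted(os.listdir(tempd)))  # expect exactly one *.whl entry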
163  env/lib/python3.8/site-packages/pip/_internal/operations/check.py  (vendored, new file)
@@ -0,0 +1,163 @@
"""Validation of dependencies of packages
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

import logging
from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.distributions import (
    make_distribution_for_install_requirement,
)
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement
    from typing import (
        Any, Callable, Dict, Optional, Set, Tuple, List
    )

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    if should_ignore is None:
        def should_ignore(name):
            return False

    missing = {}
    conflicting = {}

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install.
    """

    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_pkg_resources_distribution()

        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[str], PackageSet) -> Set[str]
    packages_affected = set(would_be_installed)

    for package_name in package_set:
        if package_name in packages_affected:
            continue

        for req in package_set[package_name].requires:
            if canonicalize_name(req.name) in packages_affected:
                packages_affected.add(package_name)
                break

    return packages_affected
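
check_package_set() can be exercised against a hand-built package set instead of the real installed environment. A small sketch, assuming this vendored pip is importable as "pip":

from pip._internal.operations.check import PackageDetails, check_package_set
from pip._vendor.pkg_resources import Requirement

package_set = {
    'app': PackageDetails('1.0', [Requirement.parse('lib>=2.0')]),
    'lib': PackageDetails('1.5', []),  # too old for app's pin
}
missing, conflicting = check_package_set(package_set)
print(missing)      # {}
print(conflicting)  # {'app': [('lib', '1.5', ...)]} - lib 1.5 fails lib>=2.0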
265  env/lib/python3.8/site-packages/pip/_internal/operations/freeze.py  (vendored, new file)
@@ -0,0 +1,265 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import collections
import logging
import os
import re

from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.misc import (
    dist_is_editable,
    get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
    )
    from pip._internal.cache import WheelCache
    from pip._vendor.pkg_resources import (
        Distribution, Requirement
    )

    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]


logger = logging.getLogger(__name__)


def freeze(
    requirement=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    local_only=None,  # type: Optional[bool]
    user_only=None,  # type: Optional[bool]
    paths=None,  # type: Optional[List[str]]
    skip_regex=None,  # type: Optional[str]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    for link in find_links:
        yield '-f %s' % link
    installations = {}  # type: Dict[str, FrozenRequirement]
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only,
                                            paths=paths):
        try:
            req = FrozenRequirement.from_dist(dist)
        except RequirementParseError as exc:
            # We include dist rather than dist.project_name because the
            # dist string includes more information, like the version and
            # location. We also include the exception message to aid
            # troubleshooting.
            logger.warning(
                'Could not generate requirement for distribution %r: %s',
                dist, exc
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()  # type: Set[str]
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(
                            line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub('', line).strip(),
                                    line_req.name
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            yield str(installations[
                                line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()


def get_requirement_info(dist):
    # type: (Distribution) -> RequirementInfo
    """
    Compute and return values (req, editable, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist_is_editable(dist):
        return (None, False, [])

    location = os.path.normcase(os.path.abspath(dist.location))

    from pip._internal.vcs import vcs, RemoteNotFoundError
    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        req = dist.as_requirement()
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r', req,
            location,
        )
        comments = [
            '# Editable install with no version control ({})'.format(req)
        ]
        return (location, True, comments)

    try:
        req = vcs_backend.get_src_requirement(location, dist.project_name)
    except RemoteNotFoundError:
        req = dist.as_requirement()
        comments = [
            '# Editable {} install with no remote ({})'.format(
                type(vcs_backend).__name__, req,
            )
        ]
        return (location, True, comments)

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            vcs_backend.name,
        )
        return (None, True, [])

    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
    else:
        if req is not None:
            return (req, True, [])

    logger.warning(
        'Could not determine repository location of %s', location
    )
    comments = ['## !! Could not determine repository location']

    return (None, False, comments)


class FrozenRequirement(object):
    def __init__(self, name, req, editable, comments=()):
        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
        self.name = name
        self.canonical_name = canonicalize_name(name)
        self.req = req
        self.editable = editable
        self.comments = comments

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> FrozenRequirement
        req, editable, comments = get_requirement_info(dist)
        if req is None:
            req = dist.as_requirement()

        return cls(dist.project_name, req, editable, comments=comments)

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
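
freeze() is a generator of requirement lines; pip's freeze command is essentially a loop over it. A minimal sketch, assuming this vendored pip is importable as "pip":

from pip._internal.operations.freeze import freeze

for line in freeze(local_only=True):
    # e.g. "requests==2.22.0", or "-e git+https://...#egg=proj" for editables
    print(line)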
2    env/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py  (vendored, new file)
@@ -0,0 +1,2 @@
"""For modules related to installing packages.
"""
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc  (vendored, new file; binary file not shown)
BIN  env/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc  (vendored, new file; binary file not shown)
52   env/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py  (vendored, new file)
@@ -0,0 +1,52 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging

from pip._internal.utils.logging import indent_log
from pip._internal.utils.setuptools_build import make_setuptools_develop_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.build_env import BuildEnvironment


logger = logging.getLogger(__name__)


def install_editable(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    prefix,  # type: Optional[str]
    home,  # type: Optional[str]
    use_user_site,  # type: bool
    name,  # type: str
    setup_py_path,  # type: str
    isolated,  # type: bool
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
):
    # type: (...) -> None
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info('Running setup.py develop for %s', name)

    args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        install_options=install_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    with indent_log():
        with build_env:
            call_subprocess(
                args,
                cwd=unpacked_source_directory,
            )
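
install_editable() boils down to running setup.py develop inside the unpacked source tree. A standalone sketch of the underlying command, with a hypothetical path (not part of the commit):

import subprocess
import sys

subprocess.check_call(
    [sys.executable, "setup.py", "develop", "--no-deps"],
    cwd="/path/to/project",  # hypothetical unpacked source directory
)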
129  env/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py  (vendored, new file)
@@ -0,0 +1,129 @@
"""Legacy installation process, i.e. `setup.py install`.
"""

import logging
import os
from distutils.util import change_root

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_install_args
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.models.scheme import Scheme
    from pip._internal.req.req_install import InstallRequirement


logger = logging.getLogger(__name__)


def install(
    install_req,  # type: InstallRequirement
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
):
    # type: (...) -> None
    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        install_req.options.get('global_options', [])
    install_options = list(install_options) + \
        install_req.options.get('install_options', [])

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = make_setuptools_install_args(
            install_req.setup_py_path,
            global_options=global_options,
            install_options=install_options,
            record_filename=record_filename,
            root=root,
            prefix=prefix,
            header_dir=header_dir,
            home=home,
            use_user_site=use_user_site,
            no_user_config=install_req.isolated,
            pycompile=pycompile,
        )

        runner = runner_with_spinner_message(
            "Running setup.py install for {}".format(install_req.name)
        )
        with indent_log(), install_req.build_env:
            runner(
                cmd=install_args,
                cwd=install_req.unpacked_source_directory,
            )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        install_req.install_succeeded = True

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        deprecated(
            reason=(
                "{} did not indicate that it installed an "
                ".egg-info directory. Only setup.py projects "
                "generating .egg-info directories are supported."
            ).format(install_req),
            replacement=(
                "for maintainers: updating the setup.py of {0}. "
                "For users: contact the maintainers of {0} to let "
                "them know to update their setup.py.".format(
                    install_req.name
                )
            ),
            gone_in="20.2",
            issue=6998,
        )
        # FIXME: put the record somewhere
        return
    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir)
        )
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')
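
prepend_root() relies on distutils.util.change_root, which grafts an absolute path onto the --root directory. A quick self-contained illustration:

from distutils.util import change_root

# On POSIX this prints /staging/usr/lib/python3.8/site-packages/pkg
print(change_root("/staging", "/usr/lib/python3.8/site-packages/pkg"))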
615  env/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py  (vendored, new file)
@@ -0,0 +1,615 @@
|
||||
"""Support for installing and building the "wheel" binary package format.
|
||||
"""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import collections
|
||||
import compileall
|
||||
import csv
|
||||
import logging
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import warnings
|
||||
from base64 import urlsafe_b64encode
|
||||
from zipfile import ZipFile
|
||||
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.distlib.scripts import ScriptMaker
|
||||
from pip._vendor.distlib.util import get_export_entry
|
||||
from pip._vendor.six import StringIO
|
||||
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.locations import get_major_minor_version
|
||||
from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
from pip._internal.utils.wheel import parse_wheel
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from email.message import Message
|
||||
from typing import (
|
||||
Dict, List, Optional, Sequence, Tuple, IO, Text, Any,
|
||||
Iterable, Callable, Set,
|
||||
)
|
||||
|
||||
from pip._internal.models.scheme import Scheme
|
||||
|
||||
InstalledCSVRow = Tuple[str, ...]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normpath(src, p):
|
||||
# type: (str, str) -> str
|
||||
return os.path.relpath(src, p).replace(os.path.sep, '/')
|
||||
|
||||
|
||||
def rehash(path, blocksize=1 << 20):
|
||||
# type: (str, int) -> Tuple[str, str]
|
||||
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
|
||||
h, length = hash_file(path, blocksize)
|
||||
digest = 'sha256=' + urlsafe_b64encode(
|
||||
h.digest()
|
||||
).decode('latin1').rstrip('=')
|
||||
# unicode/str python2 issues
|
||||
return (digest, str(length)) # type: ignore
|
||||
|
||||
|
||||
def open_for_csv(name, mode):
|
||||
# type: (str, Text) -> IO[Any]
|
||||
if sys.version_info[0] < 3:
|
||||
nl = {} # type: Dict[str, Any]
|
||||
bin = 'b'
|
||||
else:
|
||||
nl = {'newline': ''} # type: Dict[str, Any]
|
||||
bin = ''
|
||||
return open(name, mode + bin, **nl)
|
||||
|
||||
|
||||
def fix_script(path):
|
||||
# type: (str) -> Optional[bool]
|
||||
"""Replace #!python with #!/path/to/python
|
||||
Return True if file was changed.
|
||||
"""
|
||||
# XXX RECORD hashes will need to be updated
|
||||
if os.path.isfile(path):
|
||||
with open(path, 'rb') as script:
|
||||
firstline = script.readline()
|
||||
if not firstline.startswith(b'#!python'):
|
||||
return False
|
||||
exename = sys.executable.encode(sys.getfilesystemencoding())
|
||||
firstline = b'#!' + exename + os.linesep.encode("ascii")
|
||||
rest = script.read()
|
||||
with open(path, 'wb') as script:
|
||||
script.write(firstline)
|
||||
script.write(rest)
|
||||
return True
|
||||
return None
|
||||
|
||||
|
||||
def wheel_root_is_purelib(metadata):
|
||||
# type: (Message) -> bool
|
||||
return metadata.get("Root-Is-Purelib", "").lower() == "true"
|
||||
|
||||
|
||||
def get_entrypoints(filename):
|
||||
# type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
|
||||
if not os.path.exists(filename):
|
||||
return {}, {}
|
||||
|
||||
# This is done because you can pass a string to entry_points wrappers which
|
||||
# means that they may or may not be valid INI files. The attempt here is to
|
||||
# strip leading and trailing whitespace in order to make them valid INI
|
||||
# files.
|
||||
with open(filename) as fp:
|
||||
data = StringIO()
|
||||
for line in fp:
|
||||
data.write(line.strip())
|
||||
data.write("\n")
|
||||
data.seek(0)
|
||||
|
||||
# get the entry points and then the script names
|
||||
entry_points = pkg_resources.EntryPoint.parse_map(data)
|
||||
console = entry_points.get('console_scripts', {})
|
||||
gui = entry_points.get('gui_scripts', {})
|
||||
|
||||
def _split_ep(s):
|
||||
# type: (pkg_resources.EntryPoint) -> Tuple[str, str]
|
||||
"""get the string representation of EntryPoint,
|
||||
remove space and split on '='
|
||||
"""
|
||||
split_parts = str(s).replace(" ", "").split("=")
|
||||
return split_parts[0], split_parts[1]
|
||||
|
||||
# convert the EntryPoint objects into strings with module:function
|
||||
console = dict(_split_ep(v) for v in console.values())
|
||||
gui = dict(_split_ep(v) for v in gui.values())
|
||||
return console, gui
|
||||
|
||||
|
||||
def message_about_scripts_not_on_PATH(scripts):
|
||||
# type: (Sequence[str]) -> Optional[str]
|
||||
"""Determine if any scripts are not on PATH and format a warning.
|
||||
Returns a warning message if one or more scripts are not on PATH,
|
||||
otherwise None.
|
||||
"""
|
||||
if not scripts:
|
||||
return None
|
||||
|
||||
# Group scripts by the path they were installed in
|
||||
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
|
||||
for destfile in scripts:
|
||||
parent_dir = os.path.dirname(destfile)
|
||||
script_name = os.path.basename(destfile)
|
||||
grouped_by_dir[parent_dir].add(script_name)
|
||||
|
||||
# We don't want to warn for directories that are on PATH.
|
||||
not_warn_dirs = [
|
||||
os.path.normcase(i).rstrip(os.sep) for i in
|
||||
os.environ.get("PATH", "").split(os.pathsep)
|
||||
]
|
||||
# If an executable sits with sys.executable, we don't warn for it.
|
||||
# This covers the case of venv invocations without activating the venv.
|
||||
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
|
||||
warn_for = {
|
||||
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
|
||||
if os.path.normcase(parent_dir) not in not_warn_dirs
|
||||
} # type: Dict[str, Set[str]]
|
||||
if not warn_for:
|
||||
return None
|
||||
|
||||
# Format a message
|
||||
msg_lines = []
|
||||
for parent_dir, dir_scripts in warn_for.items():
|
||||
sorted_scripts = sorted(dir_scripts) # type: List[str]
|
||||
if len(sorted_scripts) == 1:
|
||||
start_text = "script {} is".format(sorted_scripts[0])
|
||||
else:
|
||||
start_text = "scripts {} are".format(
|
||||
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
|
||||
)
|
||||
|
||||
msg_lines.append(
|
||||
"The {} installed in '{}' which is not on PATH."
|
||||
.format(start_text, parent_dir)
|
||||
)
|
||||
|
||||
last_line_fmt = (
|
||||
"Consider adding {} to PATH or, if you prefer "
|
||||
"to suppress this warning, use --no-warn-script-location."
|
||||
)
|
||||
if len(msg_lines) == 1:
|
||||
msg_lines.append(last_line_fmt.format("this directory"))
|
||||
else:
|
||||
msg_lines.append(last_line_fmt.format("these directories"))
|
||||
|
||||
# Add a note if any directory starts with ~
|
||||
warn_for_tilde = any(
|
||||
i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
|
||||
)
|
||||
if warn_for_tilde:
|
||||
tilde_warning_msg = (
|
||||
"NOTE: The current PATH contains path(s) starting with `~`, "
|
||||
"which may not be expanded by all applications."
|
||||
)
|
||||
msg_lines.append(tilde_warning_msg)
|
||||
|
||||
# Returns the formatted multiline message
|
||||
return "\n".join(msg_lines)
|
||||
|
||||
|
||||
def sorted_outrows(outrows):
|
||||
# type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
|
||||
"""Return the given rows of a RECORD file in sorted order.
|
||||
|
||||
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
|
||||
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
|
||||
passed to this function, the size can be an integer as an int or string,
|
||||
or the empty string.
|
||||
"""
|
||||
# Normally, there should only be one row per path, in which case the
|
||||
# second and third elements don't come into play when sorting.
|
||||
# However, in cases in the wild where a path might happen to occur twice,
|
||||
# we don't want the sort operation to trigger an error (but still want
|
||||
# determinism). Since the third element can be an int or string, we
|
||||
# coerce each element to a string to avoid a TypeError in this case.
|
||||
# For additional background, see--
|
||||
# https://github.com/pypa/pip/issues/5868
|
||||
return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
|
||||
|
||||
|
||||
def get_csv_rows_for_installed(
|
||||
old_csv_rows, # type: Iterable[List[str]]
|
||||
installed, # type: Dict[str, str]
|
||||
changed, # type: Set[str]
|
||||
generated, # type: List[str]
|
||||
lib_dir, # type: str
|
||||
):
|
||||
# type: (...) -> List[InstalledCSVRow]
|
||||
"""
|
||||
:param installed: A map from archive RECORD path to installation RECORD
|
||||
path.
|
||||
"""
|
||||
installed_rows = [] # type: List[InstalledCSVRow]
|
||||
for row in old_csv_rows:
|
||||
if len(row) > 3:
|
||||
logger.warning(
|
||||
'RECORD line has more than three elements: {}'.format(row)
|
||||
)
|
||||
# Make a copy because we are mutating the row.
|
||||
row = list(row)
|
||||
old_path = row[0]
|
||||
new_path = installed.pop(old_path, old_path)
|
||||
row[0] = new_path
|
||||
if new_path in changed:
|
||||
digest, length = rehash(new_path)
|
||||
row[1] = digest
|
||||
row[2] = length
|
||||
installed_rows.append(tuple(row))
|
||||
for f in generated:
|
||||
digest, length = rehash(f)
|
||||
installed_rows.append((normpath(f, lib_dir), digest, str(length)))
|
||||
for f in installed:
|
||||
installed_rows.append((installed[f], '', ''))
|
||||
return installed_rows
|
||||
|
||||
|
||||
class MissingCallableSuffix(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def _raise_for_invalid_entrypoint(specification):
|
||||
# type: (str) -> None
|
||||
entry = get_export_entry(specification)
|
||||
if entry is not None and entry.suffix is None:
|
||||
raise MissingCallableSuffix(str(entry))
|
||||
|
||||
|
||||
class PipScriptMaker(ScriptMaker):
|
||||
def make(self, specification, options=None):
|
||||
# type: (str, Dict[str, Any]) -> List[str]
|
||||
_raise_for_invalid_entrypoint(specification)
|
||||
return super(PipScriptMaker, self).make(specification, options)
|
||||
|
||||
|
||||
def install_unpacked_wheel(
|
||||
name, # type: str
|
||||
wheeldir, # type: str
|
||||
wheel_zip, # type: ZipFile
|
||||
scheme, # type: Scheme
|
||||
req_description, # type: str
|
||||
pycompile=True, # type: bool
|
||||
warn_script_location=True # type: bool
|
||||
):
|
||||
# type: (...) -> None
|
||||
"""Install a wheel.
|
||||
|
||||
:param name: Name of the project to install
|
||||
:param wheeldir: Base directory of the unpacked wheel
|
||||
:param wheel_zip: open ZipFile for wheel being installed
|
||||
:param scheme: Distutils scheme dictating the install directories
|
||||
:param req_description: String used in place of the requirement, for
|
||||
logging
|
||||
:param pycompile: Whether to byte-compile installed Python files
|
||||
:param warn_script_location: Whether to check that scripts are installed
|
||||
into a directory on PATH
|
||||
:raises UnsupportedWheel:
|
||||
* when the directory holds an unpacked wheel with incompatible
|
||||
Wheel-Version
|
||||
* when the .dist-info dir does not match the wheel
|
||||
"""
|
||||
# TODO: Investigate and break this up.
|
||||
# TODO: Look into moving this into a dedicated class for representing an
|
||||
# installation.
|
||||
|
||||
source = wheeldir.rstrip(os.path.sep) + os.path.sep
|
||||
|
||||
info_dir, metadata = parse_wheel(wheel_zip, name)
|
||||
|
||||
if wheel_root_is_purelib(metadata):
|
||||
lib_dir = scheme.purelib
|
||||
else:
|
||||
lib_dir = scheme.platlib
|
||||
|
||||
subdirs = os.listdir(source)
|
||||
data_dirs = [s for s in subdirs if s.endswith('.data')]
|
||||
|
||||
# Record details of the files moved
|
||||
# installed = files copied from the wheel to the destination
|
||||
# changed = files changed while installing (scripts #! line typically)
|
||||
# generated = files newly generated during the install (script wrappers)
|
||||
installed = {} # type: Dict[str, str]
|
||||
changed = set()
|
||||
generated = [] # type: List[str]
|
||||
|
||||
# Compile all of the pyc files that we're going to be installing
|
||||
if pycompile:
|
||||
with captured_stdout() as stdout:
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings('ignore')
|
||||
compileall.compile_dir(source, force=True, quiet=True)
|
||||
logger.debug(stdout.getvalue())
|
||||
|
||||
def record_installed(srcfile, destfile, modified=False):
|
||||
# type: (str, str, bool) -> None
|
||||
"""Map archive RECORD paths to installation RECORD paths."""
|
||||
oldpath = normpath(srcfile, wheeldir)
|
||||
newpath = normpath(destfile, lib_dir)
|
||||
installed[oldpath] = newpath
|
||||
if modified:
|
||||
changed.add(destfile)
|
||||
|
||||
def clobber(
|
||||
source, # type: str
|
||||
dest, # type: str
|
||||
is_base, # type: bool
|
||||
fixer=None, # type: Optional[Callable[[str], Any]]
|
||||
filter=None # type: Optional[Callable[[str], bool]]
|
||||
):
|
||||
# type: (...) -> None
|
||||
ensure_dir(dest) # common for the 'include' path
|
||||
|
||||
for dir, subdirs, files in os.walk(source):
|
||||
basedir = dir[len(source):].lstrip(os.path.sep)
|
||||
destdir = os.path.join(dest, basedir)
|
||||
if is_base and basedir == '':
|
||||
subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
|
||||
for f in files:
|
||||
# Skip unwanted files
|
||||
if filter and filter(f):
|
||||
continue
|
||||
srcfile = os.path.join(dir, f)
|
||||
destfile = os.path.join(dest, basedir, f)
|
||||
# directory creation is lazy and after the file filtering above
|
||||
# to ensure we don't install empty dirs; empty dirs can't be
|
||||
# uninstalled.
|
||||
ensure_dir(destdir)
|
||||
|
||||
# copyfile (called below) truncates the destination if it
|
||||
# exists and then writes the new contents. This is fine in most
|
||||
# cases, but can cause a segfault if pip has loaded a shared
|
||||
# object (e.g. from pyopenssl through its vendored urllib3)
|
||||
# Since the shared object is mmap'd an attempt to call a
|
||||
# symbol in it will then cause a segfault. Unlinking the file
|
||||
# allows writing of new contents while allowing the process to
|
||||
# continue to use the old copy.
|
||||
if os.path.exists(destfile):
|
||||
os.unlink(destfile)
|
||||
|
||||
# We use copyfile (not move, copy, or copy2) to be extra sure
|
||||
# that we are not moving directories over (copyfile fails for
|
||||
# directories) as well as to ensure that we are not copying
|
||||
# over any metadata because we want more control over what
|
||||
# metadata we actually copy over.
|
||||
shutil.copyfile(srcfile, destfile)
|
||||
|
||||
# Copy over the metadata for the file, currently this only
|
||||
# includes the atime and mtime.
|
||||
st = os.stat(srcfile)
|
||||
if hasattr(os, "utime"):
|
||||
os.utime(destfile, (st.st_atime, st.st_mtime))
|
||||
|
||||
# If our file is executable, then make our destination file
|
||||
# executable.
|
||||
if os.access(srcfile, os.X_OK):
|
||||
st = os.stat(srcfile)
|
||||
permissions = (
|
||||
st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
||||
)
|
||||
os.chmod(destfile, permissions)
|
||||
|
||||
changed = False
|
||||
if fixer:
|
||||
changed = fixer(destfile)
|
||||
record_installed(srcfile, destfile, changed)
|
||||
|
||||
clobber(source, lib_dir, True)
|
||||
|
||||
dest_info_dir = os.path.join(lib_dir, info_dir)
|
||||
|
||||
# Get the defined entry points
|
||||
ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
|
||||
console, gui = get_entrypoints(ep_file)
|
||||
|
||||
def is_entrypoint_wrapper(name):
|
||||
# type: (str) -> bool
|
||||
# EP, EP.exe and EP-script.py are scripts generated for
|
||||
# entry point EP by setuptools
|
||||
if name.lower().endswith('.exe'):
|
||||
matchname = name[:-4]
|
||||
elif name.lower().endswith('-script.py'):
|
||||
matchname = name[:-10]
|
||||
elif name.lower().endswith(".pya"):
|
||||
matchname = name[:-4]
|
||||
else:
|
||||
matchname = name
|
||||
# Ignore setuptools-generated scripts
|
||||
return (matchname in console or matchname in gui)
|
||||
|
||||
for datadir in data_dirs:
|
||||
fixer = None
|
||||
filter = None
|
||||
for subdir in os.listdir(os.path.join(wheeldir, datadir)):
|
||||
fixer = None
|
||||
if subdir == 'scripts':
|
||||
fixer = fix_script
|
||||
filter = is_entrypoint_wrapper
|
||||
source = os.path.join(wheeldir, datadir, subdir)
|
||||
dest = getattr(scheme, subdir)
|
||||
clobber(source, dest, False, fixer=fixer, filter=filter)
|
||||
|
||||
maker = PipScriptMaker(None, scheme.scripts)
|
||||
|
||||
# Ensure old scripts are overwritten.
|
||||
# See https://github.com/pypa/pip/issues/1800
|
||||
maker.clobber = True
|
||||
|
||||
# Ensure we don't generate any variants for scripts because this is almost
|
||||
# never what somebody wants.
|
||||
# See https://bitbucket.org/pypa/distlib/issue/35/
|
||||
maker.variants = {''}
|
||||
|
||||
# This is required because otherwise distlib creates scripts that are not
|
||||
# executable.
|
||||
# See https://bitbucket.org/pypa/distlib/issue/32/
|
||||
maker.set_mode = True
|
||||
|
||||
scripts_to_generate = []
|
||||
|
||||
# Special case pip and setuptools to generate versioned wrappers
|
||||
#
|
||||
# The issue is that some projects (specifically, pip and setuptools) use
|
||||
# code in setup.py to create "versioned" entry points - pip2.7 on Python
|
||||
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
|
||||
# the wheel metadata at build time, and so if the wheel is installed with
|
||||
# a *different* version of Python the entry points will be wrong. The
|
||||
# correct fix for this is to enhance the metadata to be able to describe
|
||||
# such versioned entry points, but that won't happen till Metadata 2.0 is
|
||||
# available.
|
||||
# In the meantime, projects using versioned entry points will either have
|
||||
# incorrect versioned entry points, or they will not be able to distribute
|
||||
# "universal" wheels (i.e., they will need a wheel per Python version).
|
||||
#
|
||||
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
|
||||
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
|
||||
# override the versioned entry points in the wheel and generate the
|
||||
# correct ones. This code is purely a short-term measure until Metadata 2.0
|
||||
# is available.
|
||||
#
|
||||
# To add the level of hack in this section of code, in order to support
|
||||
# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
|
||||
# variable which will control which version scripts get installed.
|
||||
#
|
||||
# ENSUREPIP_OPTIONS=altinstall
|
||||
# - Only pipX.Y and easy_install-X.Y will be generated and installed
|
||||
# ENSUREPIP_OPTIONS=install
|
||||
# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
|
||||
# that this option is technically if ENSUREPIP_OPTIONS is set and is
|
||||
# not altinstall
|
||||
# DEFAULT
|
||||
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
|
||||
# and easy_install-X.Y.
|
||||
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append(
                'pip%s = %s' % (sys.version_info[0], pip_script)
            )

        scripts_to_generate.append(
            'pip%s = %s' % (get_major_minor_version(), pip_script)
        )
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append(
                'easy_install = ' + easy_install_script
            )

        scripts_to_generate.append(
            'easy_install-%s = %s' % (
                get_major_minor_version(), easy_install_script
            )
        )
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate.extend(
        '%s = %s' % kv for kv in console.items()
    )

    gui_scripts_to_generate = [
        '%s = %s' % kv for kv in gui.items()
    ]

    generated_console_scripts = []  # type: List[str]

    try:
        generated_console_scripts = maker.make_multiple(scripts_to_generate)
        generated.extend(generated_console_scripts)

        generated.extend(
            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
        )
    except MissingCallableSuffix as e:
        entry = e.args[0]
        raise InstallationError(
            "Invalid script entry point: {} for req: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information.".format(entry, req_description)
        )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    # Record pip as the installer
    installer = os.path.join(dest_info_dir, 'INSTALLER')
    temp_installer = os.path.join(dest_info_dir, 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(dest_info_dir, 'RECORD')
    temp_record = os.path.join(dest_info_dir, 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            outrows = get_csv_rows_for_installed(
                reader, installed=installed, changed=changed,
                generated=generated, lib_dir=lib_dir,
            )
            writer = csv.writer(record_out)
            # Sort to simplify testing.
            for row in sorted_outrows(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)


def install_wheel(
    name,  # type: str
    wheel_path,  # type: str
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    _temp_dir_for_testing=None,  # type: Optional[str]
):
    # type: (...) -> None
    with TempDirectory(
        path=_temp_dir_for_testing, kind="unpacked-wheel"
    ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z:
        unpack_file(wheel_path, unpacked_dir.path)
        install_unpacked_wheel(
            name=name,
            wheeldir=unpacked_dir.path,
            wheel_zip=z,
            scheme=scheme,
            req_description=req_description,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
        )
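A minimal sketch of how a caller might drive install_wheel, the public entry
point above. This is illustrative only: the import paths for Scheme and for
this module are assumptions, and every path below is hypothetical.

    from pip._internal.models.scheme import Scheme  # assumed import path
    from pip._internal.operations.install.wheel import install_wheel  # assumed

    # A hypothetical target layout; install_wheel copies the wheel's
    # contents into the locations this Scheme describes.
    scheme = Scheme(
        platlib='/tmp/target/lib',
        purelib='/tmp/target/lib',
        headers='/tmp/target/include',
        scripts='/tmp/target/bin',
        data='/tmp/target',
    )

    install_wheel(
        name='demo',
        wheel_path='/tmp/demo-1.0-py3-none-any.whl',  # hypothetical wheel
        scheme=scheme,
        req_description='demo==1.0',
    )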
591
env/lib/python3.8/site-packages/pip/_internal/operations/prepare.py
vendored
Normal file
@@ -0,0 +1,591 @@
"""Prepares a distribution for installation
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import logging
import mimetypes
import os
import shutil
import sys

from pip._vendor import requests
from pip._vendor.six import PY2

from pip._internal.distributions import (
    make_distribution_for_install_requirement,
)
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.exceptions import (
    DirectoryUrlHashUnsupported,
    HashMismatch,
    HashUnpinned,
    InstallationError,
    PreviousBuildDirError,
    VcsHashUnsupported,
)
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.marker_files import write_delete_marker_file
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    hide_url,
    path_to_display,
    rmtree,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (
        Callable, List, Optional, Tuple,
    )

    from mypy_extensions import TypedDict

    from pip._internal.distributions import AbstractDistribution
    from pip._internal.index.package_finder import PackageFinder
    from pip._internal.models.link import Link
    from pip._internal.network.download import Downloader
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.req.req_tracker import RequirementTracker
    from pip._internal.utils.hashes import Hashes

    if PY2:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'ignore': Callable[[str, List[str]], List[str]],
                'symlinks': bool,
            },
            total=False,
        )
    else:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'copy_function': Callable[[str, str], None],
                'ignore': Callable[[str, List[str]], List[str]],
                'ignore_dangling_symlinks': bool,
                'symlinks': bool,
            },
            total=False,
        )
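    # CopytreeKwargs mirrors the subset of keyword arguments that
    # shutil.copytree() accepts on the running Python version;
    # _copy_source_tree below builds such a dict and passes it to copytree.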

logger = logging.getLogger(__name__)


def _get_prepared_distribution(
        req,  # type: InstallRequirement
        req_tracker,  # type: RequirementTracker
        finder,  # type: PackageFinder
        build_isolation  # type: bool
):
    # type: (...) -> AbstractDistribution
    """Prepare a distribution for installation.
    """
    abstract_dist = make_distribution_for_install_requirement(req)
    with req_tracker.track(req):
        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
    return abstract_dist


def unpack_vcs_link(link, location):
    # type: (Link, str) -> None
    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
    assert vcs_backend is not None
    vcs_backend.unpack(location, url=hide_url(link.url))


def _copy_file(filename, location, link):
    # type: (str, str, Link) -> None
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file {} exists. (i)gnore, (w)ipe, (b)ackup, (a)bort'.format(
                display_path(download_location)
            ),
            ('i', 'w', 'b', 'a'),
        )
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> str
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    # Unpack the archive to the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    unpack_file(from_path, location, content_type)

    return from_path


def _copy2_ignoring_special_files(src, dest):
    # type: (str, str) -> None
    """Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as e:
        # SpecialFileError may be raised due to either the source or the
        # destination. If the destination were the cause we would actually
        # care, but since the destination directory is deleted prior to the
        # copy, we ignore all such errors, assuming the source is the cause.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(e),
            path_to_display(src),
            path_to_display(dest),
        )


def _copy_source_tree(source, target):
    # type: (str, str) -> None
    def ignore(d, names):
        # type: (str, List[str]) -> List[str]
        # Pulling in these directories can potentially be very slow, so
        # exclude them if they appear in the top-level dir (and only there).
        # See discussion at https://github.com/pypa/pip/pull/6770
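        # For example, when copying '/src/proj' this skips '/src/proj/.tox',
        # but a nested 'sub/.tox' is still copied, since only the top level
        # is filtered.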
        return ['.tox', '.nox'] if d == source else []

    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs

    if not PY2:
        # Python 2 does not support copy_function, so we only ignore
        # errors on special file copy in Python 3.
        kwargs['copy_function'] = _copy2_ignoring_special_files

    shutil.copytree(source, target, **kwargs)


def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location.
    """
    link_path = link.file_path
    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link_path, location)
        return None

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    # Unpack the archive to the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    unpack_file(from_path, location, content_type)

    return from_path


def unpack_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[str]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # file urls
    elif link.is_file:
        return unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        return unpack_http_url(
            link,
            location,
            downloader,
            download_dir,
            hashes=hashes,
        )


def _download_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using the provided downloader"""
    download = downloader(link)

    file_path = os.path.join(temp_dir, download.filename)
    with open(file_path, 'wb') as content_file:
        for chunk in download.chunks:
            content_file.write(chunk)

    if hashes:
        hashes.check_against_path(file_path)

    return file_path, download.response.headers.get('content-type', '')


def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Optional[Hashes]) -> Optional[str]
    """Check download_dir for a previously downloaded file with the correct
    hash. If a correct file is found, return its path; otherwise return None.
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path


class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        build_isolation,  # type: bool
        req_tracker,  # type: RequirementTracker
        downloader,  # type: Downloader
        finder,  # type: PackageFinder
        require_hashes,  # type: bool
        use_user_site,  # type: bool
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self.downloader = downloader
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they
        # are written to the download_dir parameter. Separate from
        # download_dir to permit only keeping wheel archives for `pip wheel`.
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by `pip wheel`.

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

    @property
    def _download_should_save(self):
        # type: () -> bool
        if not self.download_dir:
            return False

        if os.path.exists(self.download_dir):
            return True

        logger.critical('Could not find download directory')
        raise InstallationError(
            "Could not find or access download directory '{}'"
            .format(self.download_dir))

    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Break this up into smaller functions.
        if link.scheme == 'file':
            path = link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

        with indent_log():
            # If filesystem packages are not marked editable in a req, a
            # non-deterministic error occurs when the script attempts to
            # unpack the build directory. source_dir is only set for
            # editable requirements, so it must still be None here.
            assert req.source_dir is None
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. Version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '{}' due to a"
                    " pre-existing build directory ({}). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    .format(req, req.source_dir)
                )

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if self.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not self.require_hashes)
            if self.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()
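                # (MissingHashes.check_against_path() computes the file's
                # actual digest and raises HashMissing showing the value
                # the user should pin.)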

            download_dir = self.download_dir
            if link.is_wheel and self.wheel_download_dir:
                # When doing `pip wheel` we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir

            try:
                local_path = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=hashes,
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_path:
                req.local_file_path = local_path

            if link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            else:
                # We always delete unpacked sdists after pip runs.
                autodelete_unpacked = True
            if autodelete_unpacked:
                write_delete_marker_file(req.source_dir)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_path and not os.path.exists(
                    os.path.join(download_dir, link.filename)
                ):
                    _copy_file(local_path, download_dir, link)

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    'The editable requirement {} cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.'.format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(self.use_user_site)

        return abstract_dist

    def prepare_installed_requirement(
        self,
        req,  # type: InstallRequirement
        skip_reason  # type: str
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get a skip reason, but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = InstalledDistribution(req)

        return abstract_dist
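The hash checking used throughout prepare.py (_check_download_dir,
_download_http_url and unpack_file_url) boils down to
Hashes.check_against_path. A minimal sketch of that behavior, assuming a
hypothetical local archive; only Hashes and HashMismatch come from the code
above, everything else is made up:

    import hashlib

    from pip._internal.exceptions import HashMismatch
    from pip._internal.utils.hashes import Hashes

    path = '/tmp/demo-1.0.tar.gz'  # hypothetical artifact

    # Compute the file's sha256, as a user would for a --hash option.
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).hexdigest()

    # A matching hash passes silently...
    Hashes({'sha256': [digest]}).check_against_path(path)

    # ...while a non-matching one raises HashMismatch, which is what
    # triggers the re-download in _check_download_dir.
    try:
        Hashes({'sha256': ['0' * 64]}).check_against_path(path)
    except HashMismatch:
        pass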