comparison env/lib/python3.9/site-packages/pip/_internal/req/req_install.py @ 0:4f3585e2f14b draft default tip

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author shellac
date Mon, 22 Mar 2021 18:12:50 +0000
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:4f3585e2f14b
1 # The following comment should be removed at some point in the future.
2 # mypy: strict-optional=False
3
4 import logging
5 import os
6 import shutil
7 import sys
8 import uuid
9 import zipfile
10
11 from pip._vendor import pkg_resources, six
12 from pip._vendor.packaging.requirements import Requirement
13 from pip._vendor.packaging.utils import canonicalize_name
14 from pip._vendor.packaging.version import Version
15 from pip._vendor.packaging.version import parse as parse_version
16 from pip._vendor.pep517.wrappers import Pep517HookCaller
17
18 from pip._internal.build_env import NoOpBuildEnvironment
19 from pip._internal.exceptions import InstallationError
20 from pip._internal.locations import get_scheme
21 from pip._internal.models.link import Link
22 from pip._internal.operations.build.metadata import generate_metadata
23 from pip._internal.operations.build.metadata_legacy import (
24 generate_metadata as generate_metadata_legacy,
25 )
26 from pip._internal.operations.install.editable_legacy import (
27 install_editable as install_editable_legacy,
28 )
29 from pip._internal.operations.install.legacy import LegacyInstallFailure
30 from pip._internal.operations.install.legacy import install as install_legacy
31 from pip._internal.operations.install.wheel import install_wheel
32 from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
33 from pip._internal.req.req_uninstall import UninstallPathSet
34 from pip._internal.utils.deprecation import deprecated
35 from pip._internal.utils.direct_url_helpers import direct_url_from_link
36 from pip._internal.utils.hashes import Hashes
37 from pip._internal.utils.logging import indent_log
38 from pip._internal.utils.misc import (
39 ask_path_exists,
40 backup_dir,
41 display_path,
42 dist_in_site_packages,
43 dist_in_usersite,
44 get_distribution,
45 get_installed_version,
46 hide_url,
47 redact_auth_from_url,
48 )
49 from pip._internal.utils.packaging import get_metadata
50 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
51 from pip._internal.utils.typing import MYPY_CHECK_RUNNING
52 from pip._internal.utils.virtualenv import running_under_virtualenv
53 from pip._internal.vcs import vcs
54
55 if MYPY_CHECK_RUNNING:
56 from typing import Any, Dict, Iterable, List, Optional, Sequence, Union
57
58 from pip._vendor.packaging.markers import Marker
59 from pip._vendor.packaging.specifiers import SpecifierSet
60 from pip._vendor.pkg_resources import Distribution
61
62 from pip._internal.build_env import BuildEnvironment
63
64
65 logger = logging.getLogger(__name__)
66
67
def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        # "pkg.egg-info" -> "pkg"
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        # Only two metadata layouts are expected: .egg-info and .dist-info.
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        # "pkg-1.0.dist-info" -> "pkg": strip the extension, then take the
        # part before the first "-" (the version separator).
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
93
94
95 class InstallRequirement:
96 """
97 Represents something that may be installed later on, may have information
98 about where to fetch the relevant requirement and also contains logic for
99 installing the said requirement.
100 """
101
    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        install_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        hash_options=None,  # type: Optional[Dict[str, List[str]]]
        constraint=False,  # type: bool
        extras=(),  # type: Iterable[str]
        user_supplied=False,  # type: bool
    ):
        # type: (...) -> None
        """Record the requirement's initial state.

        Only bookkeeping happens here; fetching, unpacking, metadata
        generation and installation are done later by other components.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        # Set externally (not here); install() checks it against issue 8368
        # to decide whether to emit a deprecation warning.
        self.legacy_install_reason = None  # type: Optional[int]

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir = None  # type: Optional[str]
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            # Normalize extras parsed out of the requirement string.
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False
210
211 def __str__(self):
212 # type: () -> str
213 if self.req:
214 s = str(self.req)
215 if self.link:
216 s += ' from {}'.format(redact_auth_from_url(self.link.url))
217 elif self.link:
218 s = redact_auth_from_url(self.link.url)
219 else:
220 s = '<InstallRequirement>'
221 if self.satisfied_by is not None:
222 s += ' in {}'.format(display_path(self.satisfied_by.location))
223 if self.comes_from:
224 if isinstance(self.comes_from, str):
225 comes_from = self.comes_from # type: Optional[str]
226 else:
227 comes_from = self.comes_from.from_path()
228 if comes_from:
229 s += f' (from {comes_from})'
230 return s
231
232 def __repr__(self):
233 # type: () -> str
234 return '<{} object: {} editable={!r}>'.format(
235 self.__class__.__name__, str(self), self.editable)
236
237 def format_debug(self):
238 # type: () -> str
239 """An un-tested helper for getting state, for debugging.
240 """
241 attributes = vars(self)
242 names = sorted(attributes)
243
244 state = (
245 "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
246 )
247 return '<{name} object: {{{state}}}>'.format(
248 name=self.__class__.__name__,
249 state=", ".join(state),
250 )
251
    # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        # type: () -> Optional[str]
        """Project name from the parsed requirement, or None if unnamed.

        Passed through pkg_resources.safe_name and normalized to str.
        """
        if self.req is None:
            return None
        return six.ensure_str(pkg_resources.safe_name(self.req.name))
259
260 @property
261 def specifier(self):
262 # type: () -> SpecifierSet
263 return self.req.specifier
264
265 @property
266 def is_pinned(self):
267 # type: () -> bool
268 """Return whether I am pinned to an exact version.
269
270 For example, some-package==1.2 is pinned; some-package>1.2 is not.
271 """
272 specifiers = self.specifier
273 return (len(specifiers) == 1 and
274 next(iter(specifiers)).operator in {'==', '==='})
275
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        """Version string of the installed distribution with this name,
        or None (delegates to get_installed_version)."""
        return get_installed_version(self.name)
280
281 def match_markers(self, extras_requested=None):
282 # type: (Optional[Iterable[str]]) -> bool
283 if not extras_requested:
284 # Provide an extra to safely evaluate the markers
285 # without matching any extra
286 extras_requested = ('',)
287 if self.markers is not None:
288 return any(
289 self.markers.evaluate({'extra': extra})
290 for extra in extras_requested)
291 else:
292 return True
293
294 @property
295 def has_hash_options(self):
296 # type: () -> bool
297 """Return whether any known-good hashes are specified as options.
298
299 These activate --require-hashes mode; hashes specified as part of a
300 URL do not.
301
302 """
303 return bool(self.hash_options)
304
    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        # Copy so the URL-derived hash added below never mutates the
        # original option dict.
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)
326
327 def from_path(self):
328 # type: () -> Optional[str]
329 """Format a nice indicator to show where this "comes from"
330 """
331 if self.req is None:
332 return None
333 s = str(self.req)
334 if self.comes_from:
335 if isinstance(self.comes_from, str):
336 comes_from = self.comes_from
337 else:
338 comes_from = self.comes_from.from_path()
339 if comes_from:
340 s += '->' + comes_from
341 return s
342
    def ensure_build_location(self, build_dir, autodelete, parallel_builds):
        # type: (str, bool, bool) -> str
        """Return the directory this requirement should be built in.

        Unnamed requirements get a globally-managed temporary directory;
        named ones build in '<build_dir>/<canonical name>'.
        """
        assert build_dir is not None
        if self._temp_build_dir is not None:
            # A build location was already chosen earlier; reuse it.
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path
384
    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.

        Pins self.req to the exact name and version reported by the
        freshly generated metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            # Valid PEP 440 version: pin with ordinary equality.
            op = "=="
        else:
            # Legacy version string: use arbitrary equality ("===") so the
            # pin still parses.
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
406
    def warn_on_mismatching_name(self):
        # type: () -> None
        """Warn if generated metadata reports a different project name than
        the one this requirement was requested under, then trust the
        metadata by replacing self.req."""
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)
422
    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        if not existing_dist:
            return

        existing_version = existing_dist.parsed_version
        if not self.req.specifier.contains(existing_version, prereleases=True):
            # Installed version conflicts with what we need.
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    # A user-site install can never shadow a virtualenv's
                    # site-packages on sys.path, so refuse outright.
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist
458
459 # Things valid for wheels
460 @property
461 def is_wheel(self):
462 # type: () -> bool
463 if not self.link:
464 return False
465 return self.link.is_wheel
466
467 # Things valid for sdists
468 @property
469 def unpacked_source_directory(self):
470 # type: () -> str
471 return os.path.join(
472 self.source_dir,
473 self.link and self.link.subdirectory_fragment or '')
474
475 @property
476 def setup_py_path(self):
477 # type: () -> str
478 assert self.source_dir, f"No source dir for {self}"
479 setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')
480
481 return setup_py
482
    @property
    def pyproject_toml_path(self):
        # type: () -> str
        """Path where this project's pyproject.toml would be located
        (source_dir must be set)."""
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)
488
    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            # None means "use the legacy setup.py code path".
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        # Hook caller used later for metadata generation and wheel builds.
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )
516
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.

        Returns the path of the directory containing the generated metadata.
        """
        if not self.use_pep517:
            # Legacy path: metadata comes from "setup.py egg-info".
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or f"from {self.link}"
            )

        # PEP 517 path: load_pyproject_toml() must have set the backend.
        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )
538
    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            # The requirement was unnamed (e.g. a bare URL): derive the
            # pinned requirement from the generated metadata.
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()
558
    @property
    def metadata(self):
        # type: () -> Any
        """Project metadata, parsed lazily on first access and cached."""
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata
566
    def get_dist(self):
        # type: () -> Distribution
        """Return a Distribution built from the (already generated)
        metadata directory."""
        return _get_dist(self.metadata_directory)
570
    def assert_source_matches_version(self):
        # type: () -> None
        """Log whether the unpacked source's version satisfies this
        requirement's specifier (warns on mismatch; never raises)."""
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )
588
    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir,
        autodelete=False,
        parallel_builds=False,
    ):
        # type: (str, bool, bool) -> None
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )
612
    # For editable installations
    def update_editable(self):
        # type: () -> None
        """Refresh an editable requirement's checkout from its VCS link.

        No-ops for local file: links and when no link is known.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        # Keep any credentials embedded in the URL out of the logs.
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url)
634
    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        Returns the UninstallPathSet that was removed, or None when the
        requirement was not installed.
        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset
660
    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str
        """Return the archive member name for *path*:
        '<project name>/<path relative to rootdir>' using '/' separators."""

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name {name!r} doesn't start with prefix {prefix!r}"
                .format(**locals())
            )
            # Drop the prefix plus its trailing separator, then normalize
            # to '/': zip member names always use forward slashes.
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name
677
678 def archive(self, build_dir):
679 # type: (Optional[str]) -> None
680 """Saves archive to provided build_dir.
681
682 Used for saving downloaded VCS requirements as part of `pip download`.
683 """
684 assert self.source_dir
685 if build_dir is None:
686 return
687
688 create_archive = True
689 archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
690 archive_path = os.path.join(build_dir, archive_name)
691
692 if os.path.exists(archive_path):
693 response = ask_path_exists(
694 'The file {} exists. (i)gnore, (w)ipe, '
695 '(b)ackup, (a)bort '.format(
696 display_path(archive_path)),
697 ('i', 'w', 'b', 'a'))
698 if response == 'i':
699 create_archive = False
700 elif response == 'w':
701 logger.warning('Deleting %s', display_path(archive_path))
702 os.remove(archive_path)
703 elif response == 'b':
704 dest_file = backup_dir(archive_path)
705 logger.warning(
706 'Backing up %s to %s',
707 display_path(archive_path),
708 display_path(dest_file),
709 )
710 shutil.move(archive_path, dest_file)
711 elif response == 'a':
712 sys.exit(-1)
713
714 if not create_archive:
715 return
716
717 zip_output = zipfile.ZipFile(
718 archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
719 )
720 with zip_output:
721 dir = os.path.normcase(
722 os.path.abspath(self.unpacked_source_directory)
723 )
724 for dirpath, dirnames, filenames in os.walk(dir):
725 for dirname in dirnames:
726 dir_arcname = self._get_archive_name(
727 dirname, parentdir=dirpath, rootdir=dir,
728 )
729 zipdir = zipfile.ZipInfo(dir_arcname + '/')
730 zipdir.external_attr = 0x1ED << 16 # 0o755
731 zip_output.writestr(zipdir, '')
732 for filename in filenames:
733 file_arcname = self._get_archive_name(
734 filename, parentdir=dirpath, rootdir=dir,
735 )
736 filename = os.path.join(dirpath, filename)
737 zip_output.write(filename, file_arcname)
738
739 logger.info('Saved %s', display_path(archive_path))
740
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement into the computed scheme.

        Dispatches to one of three paths — legacy editable install, wheel
        install, or legacy setup.py install — and records the outcome in
        self.install_succeeded.
        """
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                # Record PEP 610 direct-URL provenance for the install.
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            # The setup.py run itself failed; re-raise its original cause.
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            # NOTE(review): marks success even though an exception escaped —
            # presumably because errors after this point occur post-install;
            # confirm against pip history before changing.
            self.install_succeeded = True
            raise

        self.install_succeeded = success

        if success and self.legacy_install_reason == 8368:
            # Reason code 8368 = "wheel build failed, fell back to
            # setup.py install" (see pypa/pip issue 8368).
            deprecated(
                reason=(
                    "{} was installed using the legacy 'setup.py install' "
                    "method, because a wheel could not be built for it.".
                    format(self.name)
                ),
                replacement="to fix the wheel build issue reported above",
                gone_in=None,
                issue=8368,
            )
848
849
def check_invalid_constraint_type(req):
    # type: (InstallRequirement) -> str
    """Return a message describing why *req* is unsupported as a constraint.

    Returns "" when the requirement is acceptable; otherwise emits a
    deprecation warning and returns the problem description.
    """
    # Check for unsupported forms
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.link:
        problem = "Links are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"
    else:
        problem = ""

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement=(
                "replacing the constraint with a requirement."
            ),
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210
        )

    return problem