Mercurial > repos > guerler > springsuite
comparison planemo/lib/python3.7/site-packages/pip/_internal/wheel.py @ 1:56ad4e20f292 draft
"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author | guerler |
---|---|
date | Fri, 31 Jul 2020 00:32:28 -0400 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
0:d30785e31577 | 1:56ad4e20f292 |
---|---|
1 """ | |
2 Support for installing and building the "wheel" binary package format. | |
3 """ | |
4 from __future__ import absolute_import | |
5 | |
6 import collections | |
7 import compileall | |
8 import csv | |
9 import hashlib | |
10 import logging | |
11 import os.path | |
12 import re | |
13 import shutil | |
14 import stat | |
15 import sys | |
16 import warnings | |
17 from base64 import urlsafe_b64encode | |
18 from email.parser import Parser | |
19 | |
20 from pip._vendor import pkg_resources | |
21 from pip._vendor.distlib.scripts import ScriptMaker | |
22 from pip._vendor.packaging.utils import canonicalize_name | |
23 from pip._vendor.six import StringIO | |
24 | |
25 from pip._internal import pep425tags | |
26 from pip._internal.download import unpack_url | |
27 from pip._internal.exceptions import ( | |
28 InstallationError, InvalidWheelFilename, UnsupportedWheel, | |
29 ) | |
30 from pip._internal.locations import distutils_scheme | |
31 from pip._internal.models.link import Link | |
32 from pip._internal.utils.logging import indent_log | |
33 from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME | |
34 from pip._internal.utils.misc import ( | |
35 LOG_DIVIDER, call_subprocess, captured_stdout, ensure_dir, | |
36 format_command_args, path_to_url, read_chunks, | |
37 ) | |
38 from pip._internal.utils.setuptools_build import make_setuptools_shim_args | |
39 from pip._internal.utils.temp_dir import TempDirectory | |
40 from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
41 from pip._internal.utils.ui import open_spinner | |
42 | |
if MYPY_CHECK_RUNNING:
    # Typing-only imports: MYPY_CHECK_RUNNING is False at runtime, so none
    # of these modules are actually imported outside of type checking.
    from typing import (
        Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, Iterable
    )
    from pip._vendor.packaging.requirements import Requirement
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.download import PipSession
    from pip._internal.index import FormatControl, PackageFinder
    from pip._internal.operations.prepare import (
        RequirementPreparer
    )
    from pip._internal.cache import WheelCache
    from pip._internal.pep425tags import Pep425Tag

    # One row of a RECORD file: (path, hash, size) -- see PEP 376 / PEP 427.
    InstalledCSVRow = Tuple[str, ...]


# Highest Wheel-Version (major, minor) this module knows how to install;
# see check_compatibility() for how it is enforced.
VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)
65 | |
def normpath(src, p):
    """Return *src* relative to directory *p*, normalized to forward slashes
    (the separator RECORD files use on every platform)."""
    relative = os.path.relpath(src, p)
    return relative.replace(os.path.sep, '/')
68 | |
69 | |
def hash_file(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[Any, int]
    """Return (hash, length) for path using hashlib.sha256()"""
    hasher = hashlib.sha256()
    total = 0
    with open(path, 'rb') as stream:
        # Feed the digest incrementally so arbitrarily large files never
        # need to fit in memory.
        for chunk in read_chunks(stream, size=blocksize):
            total += len(chunk)
            hasher.update(chunk)
    return (hasher, total)  # type: ignore
80 | |
81 | |
def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    hasher, length = hash_file(path, blocksize)
    # RECORD uses the PEP 376 form: urlsafe base64 without '=' padding.
    encoded = urlsafe_b64encode(hasher.digest()).decode('latin1')
    digest = 'sha256=' + encoded.rstrip('=')
    # str() keeps the length a native str on both Python 2 and 3.
    return (digest, str(length))  # type: ignore
91 | |
92 | |
def open_for_csv(name, mode):
    # type: (str, Text) -> IO
    """Open *name* for csv use with the options the csv module needs on the
    running Python: binary mode on Python 2, newline='' on Python 3."""
    if sys.version_info[0] >= 3:
        extra_kwargs = {'newline': ''}  # type: Dict[str, Any]
        mode_suffix = ''
    else:
        extra_kwargs = {}  # type: Dict[str, Any]
        mode_suffix = 'b'
    return open(name, mode + mode_suffix, **extra_kwargs)
102 | |
103 | |
def replace_python_tag(wheelname, new_tag):
    # type: (str, str) -> str
    """Replace the Python tag in a wheel file name with a new value.
    """
    # Wheel names are name-version[-build]-pyver-abi-plat.whl, so the Python
    # tag is always the third dash-separated segment from the end.
    segments = wheelname.split('-')
    segments[-3] = new_tag
    return '-'.join(segments)
111 | |
112 | |
def fix_script(path):
    # type: (str) -> Optional[bool]
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as script:
        shebang = script.readline()
        if not shebang.startswith(b'#!python'):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        shebang = b'#!' + interpreter + os.linesep.encode("ascii")
        remainder = script.read()
    with open(path, 'wb') as script:
        script.write(shebang)
        script.write(remainder)
    return True
131 | |
132 | |
# Matches a "<name>[-<version>].dist-info" directory name inside an extracted
# wheel; the version group is optional because some tools omit it.
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                              \.dist-info$""", re.VERBOSE)
135 | |
136 | |
def root_is_purelib(name, wheeldir):
    # type: (str, str) -> bool
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    # Distribution names use '_' where the project name may use '-'.
    folded_name = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        info = dist_info_re.match(entry)
        if info is None or info.group('name') != folded_name:
            continue
        wheel_metadata = os.path.join(wheeldir, entry, 'WHEEL')
        with open(wheel_metadata) as metadata:
            for raw_line in metadata:
                if raw_line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False
152 | |
153 | |
def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    """Parse entry_points.txt into ({console_scripts}, {gui_scripts}) maps of
    script name to 'module:function' string. Missing file -> empty maps."""
    if not os.path.exists(filename):
        return {}, {}

    # entry_points.txt may carry stray leading/trailing whitespace that makes
    # it an invalid INI file; rewrite every line stripped so the parser
    # accepts it.
    data = StringIO()
    with open(filename) as fp:
        for raw_line in fp:
            data.write(raw_line.strip())
            data.write("\n")
    data.seek(0)

    # Get the entry points and then the script names.
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _as_pair(entry_point):
        """str(EntryPoint) is 'name = module:function'; drop spaces and split
        on '=' to get the (name, target) pair."""
        return str(entry_point).replace(" ", "").split("=")

    console = dict(_as_pair(v) for v in console.values())
    gui = dict(_as_pair(v) for v in gui.values())
    return console, gui
184 | |
185 | |
def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Bucket script basenames by the directory they were installed into.
    scripts_by_dir = collections.defaultdict(set)  # type: Dict[str, set]
    for destfile in scripts:
        scripts_by_dir[os.path.dirname(destfile)].add(
            os.path.basename(destfile)
        )

    # Directories already on PATH never warrant a warning; neither does the
    # directory holding sys.executable, which covers venv invocations made
    # without activating the venv first.
    exempt_dirs = [
        os.path.normcase(entry).rstrip(os.sep)
        for entry in os.environ.get("PATH", "").split(os.pathsep)
    ]
    exempt_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))

    warn_for = {
        directory: names
        for directory, names in scripts_by_dir.items()
        if os.path.normcase(directory) not in exempt_dirs
    }
    if not warn_for:
        return None

    # One message line per offending directory.
    msg_lines = []
    for directory, names in warn_for.items():
        ordered_names = sorted(names)  # type: List[str]
        if len(ordered_names) == 1:
            start_text = "script {} is".format(ordered_names[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(ordered_names[:-1]) + " and " + ordered_names[-1]
            )
        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, directory)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    target = "this directory" if len(msg_lines) == 1 else "these directories"
    msg_lines.append(last_line_fmt.format(target))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
245 | |
246 | |
def sorted_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
    """
    Return the given rows of a RECORD file in sorted order.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details). For the rows
    passed to this function, the size can be an integer as an int or string,
    or the empty string.
    """
    def _stringified(row):
        # Normally there is one row per path and only the path matters for
        # ordering. But when a path occurs twice, the second/third elements
        # are compared too, and sizes may be int or str; coercing every cell
        # to str keeps the sort deterministic without raising TypeError.
        # See https://github.com/pypa/pip/issues/5868
        return tuple(str(cell) for cell in row)

    return sorted(outrows, key=_stringified)
266 | |
267 | |
def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: set
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """
    Return the RECORD rows describing all installed files.

    :param old_csv_rows: rows read from the wheel's original RECORD file.
    :param installed: A map from archive RECORD path to installation RECORD
        path. Entries are popped as their rows are rewritten; any entries
        left over afterwards are appended with empty hash/size fields.
    :param changed: set of installed paths whose contents were modified
        during install (e.g. rewritten script shebangs); they are re-hashed.
    :param generated: files newly created during the install (script
        wrappers); they are hashed and recorded relative to *lib_dir*.
    :param lib_dir: installation library directory used to relativize the
        generated files' paths.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        # Make a copy because we are mutating the row.
        row = list(row)
        old_path = row[0]
        new_path = installed.pop(old_path, old_path)
        row[0] = new_path
        if new_path in changed:
            digest, length = rehash(new_path)
            # Guard against malformed RECORD rows with fewer than three
            # fields: pad before assigning so we don't raise IndexError.
            while len(row) < 3:
                row.append('')
            row[1] = digest
            row[2] = length
        installed_rows.append(tuple(row))
    for f in generated:
        digest, length = rehash(f)
        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
    for f in installed:
        installed_rows.append((installed[f], '', ''))
    return installed_rows
302 | |
303 | |
def move_wheel_files(
    name,  # type: str
    req,  # type: Requirement
    wheeldir,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    pycompile=True,  # type: bool
    scheme=None,  # type: Optional[Mapping[str, str]]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
    warn_script_location=True  # type: bool
):
    # type: (...) -> None
    """Install a wheel.

    Copies the extracted wheel in *wheeldir* into the install locations
    described by *scheme* (computed via distutils_scheme when not given),
    generates console/GUI script wrappers for the wheel's entry points,
    and writes the INSTALLER and RECORD files into the .dist-info directory.

    :param name: project name, used to locate the .dist-info directory.
    :param req: the requirement being installed (used in messages/asserts).
    :param wheeldir: directory holding the extracted wheel contents.
    :param user/home/root/isolated/prefix: forwarded to distutils_scheme
        when *scheme* is not supplied.
    :param pycompile: byte-compile the sources before copying.
    :param scheme: optional pre-computed mapping of install locations.
    :param warn_script_location: warn when scripts land off PATH.
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #     installation.

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    # Pure-python wheels go to purelib, wheels with compiled pieces to
    # platlib (the two may be the same directory on many platforms).
    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []  # type: List[str]
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        """Recursively copy *source* into *dest*, recording every file.

        is_base: True only for the top-level copy; enables the .data-dir
            skipping and .dist-info detection below.
        fixer: optional callable run on each copied file (shebang rewrite);
            its truthy return marks the file as changed.
        filter: optional predicate; files for which it returns True are
            skipped entirely.
        """
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            # name-version.data/ subtrees are installed separately later.
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install each name-version.data/<subdir> tree into its scheme location,
    # rewriting script shebangs and skipping setuptools wrapper scripts.
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }
    # ignore type, because mypy disallows assigning to a method,
    # see https://github.com/python/mypy/issues/2427
    maker._get_script_text = _get_script_text  # type: ignore
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            outrows = get_csv_rows_for_installed(
                reader, installed=installed, changed=changed,
                generated=generated, lib_dir=lib_dir,
            )
            writer = csv.writer(record_out)
            # Sort to simplify testing.
            for row in sorted_outrows(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)
620 | |
621 | |
def wheel_version(source_dir):
    # type: (Optional[str]) -> Optional[Tuple[int, ...]]
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return None if we couldn't parse / extract it.
    """
    try:
        dist = list(pkg_resources.find_on_path(None, source_dir))[0]

        # WHEEL metadata is an RFC 822 style header block.
        parsed = Parser().parsestr(dist.get_metadata('WHEEL'))
        version_text = parsed['Wheel-Version'].strip()
        return tuple(int(part) for part in version_text.split('.'))
    except Exception:
        # Anything that goes wrong (missing dist, missing header, bad
        # number) is reported uniformly as "unknown".
        return None
640 | |
641 | |
def check_compatibility(version, name):
    # type: (Optional[Tuple[int, ...]], str) -> None
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )

    version_text = '.'.join(map(str, version))
    if version[0] > VERSION_COMPATIBLE[0]:
        # Newer major series: refuse outright.
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, version_text)
        )
    if version > VERSION_COMPATIBLE:
        # Same major, newer minor: proceed but warn.
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            version_text,
        )
670 | |
671 | |
def format_tag(file_tag):
    # type: (Tuple[str, ...]) -> str
    """
    Format three tags in the form "<python_tag>-<abi_tag>-<platform_tag>".

    :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag).
    """
    separator = '-'
    return separator.join(file_tag)
680 | |
681 | |
class Wheel(object):
    """A wheel file"""

    # TODO: Maybe move the class into the models sub-package
    # TODO: Maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        # type: (str) -> None
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        match = self.wheel_file_re.match(filename)
        if match is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # We'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = match.group('name').replace('_', '-')
        self.version = match.group('ver').replace('_', '-')
        self.build_tag = match.group('build')
        self.pyversions = match.group('pyver').split('.')
        self.abis = match.group('abi').split('.')
        self.plats = match.group('plat').split('.')

        # Every (python, abi, platform) combination this file claims to
        # support; compressed tag sets in the filename are expanded here.
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def get_formatted_file_tags(self):
        # type: () -> List[str]
        """
        Return the wheel's tags as a sorted list of strings.
        """
        return sorted(format_tag(tag) for tag in self.file_tags)

    def support_index_min(self, tags=None):
        # type: (Optional[List[Pep425Tag]]) -> Optional[int]
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0. Returns
        None is the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        positions = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not positions:
            return None
        return min(positions)

    def supported(self, tags=None):
        # type: (Optional[List[Pep425Tag]]) -> bool
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return not set(tags).isdisjoint(self.file_tags)
747 | |
748 | |
749 def _contains_egg_info( | |
750 s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): | |
751 """Determine whether the string looks like an egg_info. | |
752 | |
753 :param s: The string to parse. E.g. foo-2.1 | |
754 """ | |
755 return bool(_egg_info_re.search(s)) | |
756 | |
757 | |
def should_use_ephemeral_cache(
    req,  # type: InstallRequirement
    format_control,  # type: FormatControl
    autobuilding,  # type: bool
    cache_available  # type: bool
):
    # type: (...) -> Optional[bool]
    """
    Return whether to build an InstallRequirement object using the
    ephemeral cache.

    :param cache_available: whether a cache directory is available for the
        autobuilding=True case.

    :return: True or False to build the requirement with ephem_cache=True
        or False, respectively; or None not to build the requirement.
    """
    # Constraints are never built.
    if req.constraint:
        return None

    # Already a wheel: nothing to build.
    if req.is_wheel:
        if not autobuilding:
            logger.info(
                'Skipping %s, due to already being wheel.', req.name,
            )
        return None

    if not autobuilding:
        return False

    if req.editable or not req.source_dir:
        return None

    allowed = format_control.get_allowed_formats(canonicalize_name(req.name))
    if "binary" not in allowed:
        logger.info(
            "Skipping bdist_wheel for %s, due to binaries "
            "being disabled for it.", req.name,
        )
        return None

    if req.link and not req.link.is_artifact:
        # VCS checkout. Build wheel just for this run.
        return True

    base, ext = req.link.splitext()
    if cache_available and _contains_egg_info(base):
        return False

    # Otherwise, build the wheel just for this run using the ephemeral
    # cache since we are either in the case of e.g. a local directory, or
    # no cache directory is available to use.
    return True
810 | |
811 | |
def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> str
    """
    Format command information for logging.
    """
    command_desc = format_command_args(command_args)
    text = 'Command arguments: {}\n'.format(command_desc)

    if not command_output:
        return text + 'Command output: None'

    # Full output only at debug verbosity; otherwise point at --verbose.
    if logger.getEffectiveLevel() > logging.DEBUG:
        return text + 'Command output: [use --verbose to show]'

    output = command_output
    if not output.endswith('\n'):
        output += '\n'
    return text + 'Command output:\n{}{}'.format(output, LOG_DIVIDER)
833 | |
834 | |
def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    req,  # type: InstallRequirement
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> Optional[str]
    """
    Return the path to the wheel in the temporary build directory.

    Logs a warning and returns None when the build produced no files;
    logs a warning but still returns the first (sorted) name when it
    produced more than one.
    """
    # Sort so the choice among multiple candidates is deterministic.
    candidates = sorted(names)

    if not candidates:
        warning = 'Legacy build of wheel for {!r} created no files.\n'.format(
            req.name)
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)
        return None

    if len(candidates) > 1:
        warning = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(req.name, candidates)
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)

    return os.path.join(temp_dir, candidates[0])
865 | |
866 | |
class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(
        self,
        finder,  # type: PackageFinder
        preparer,  # type: RequirementPreparer
        wheel_cache,  # type: WheelCache
        build_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        no_clean=False  # type: bool
    ):
        # type: (...) -> None
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        # Explicit destination directory (the `pip wheel` case); may be
        # falsy, in which case built wheels go into the wheel cache.
        self._wheel_dir = preparer.wheel_download_dir

        # Extra options forwarded to the build (bdist_wheel / backend).
        self.build_options = build_options or []
        self.global_options = global_options or []
        self.no_clean = no_clean

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        """Build the wheel in a temp dir, then move it into ``output_dir``.

        Returns the final wheel path, or None if building (or moving the
        built wheel into place) failed.
        """
        with TempDirectory(kind="wheel") as temp_dir:
            # Dispatch on the build style the requirement declared.
            if req.use_pep517:
                builder = self._build_one_pep517
            else:
                builder = self._build_one_legacy
            wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
            if wheel_path is not None:
                wheel_name = os.path.basename(wheel_path)
                dest_path = os.path.join(output_dir, wheel_name)
                try:
                    wheel_hash, length = hash_file(wheel_path)
                    shutil.move(wheel_path, dest_path)
                    logger.info('Created wheel for %s: '
                                'filename=%s size=%d sha256=%s',
                                req.name, wheel_name, length,
                                wheel_hash.hexdigest())
                    logger.info('Stored in directory: %s', output_dir)
                    return dest_path
                except Exception:
                    # NOTE(review): any error while hashing/moving the built
                    # wheel is silently swallowed and treated as a failed
                    # build; only the cleanup below runs.
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None

    def _base_setup_args(self, req):
        """Return the base ``setup.py`` command line for *req*."""
        # NOTE: Eventually, we'd want to also -S to the flags here, when we're
        # isolating. Currently, it breaks Python in virtualenvs, because it
        # relies on site.py to find parts of the standard library outside the
        # virtualenv.
        base_cmd = make_setuptools_shim_args(req.setup_py_path,
                                             unbuffered_output=True)
        return base_cmd + list(self.global_options)

    def _build_one_pep517(self, req, tempd, python_tag=None):
        """Build one InstallRequirement using the PEP 517 build process.

        Returns path to wheel if successfully built. Otherwise, returns None.
        """
        assert req.metadata_directory is not None
        if self.build_options:
            # PEP 517 does not support --build-options
            logger.error('Cannot build wheel for %s using PEP 517 when '
                         '--build-options is present' % (req.name,))
            return None
        try:
            req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,)
            logger.debug('Destination directory: %s', tempd)
            wheel_name = req.pep517_backend.build_wheel(
                tempd,
                metadata_directory=req.metadata_directory
            )
            if python_tag:
                # General PEP 517 backends don't necessarily support
                # a "--python-tag" option, so we rename the wheel
                # file directly.
                new_name = replace_python_tag(wheel_name, python_tag)
                os.rename(
                    os.path.join(tempd, wheel_name),
                    os.path.join(tempd, new_name)
                )
                # Reassign to simplify the return at the end of function
                wheel_name = new_name
        except Exception:
            logger.error('Failed building wheel for %s', req.name)
            return None
        return os.path.join(tempd, wheel_name)

    def _build_one_legacy(self, req, tempd, python_tag=None):
        """Build one InstallRequirement using the "legacy" build process.

        Returns path to wheel if successfully built. Otherwise, returns None.
        """
        base_args = self._base_setup_args(req)

        spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                output = call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                         spinner=spinner)
            except Exception:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return None
            # setup.py may have produced zero, one, or several files in
            # tempd; resolve (and warn about) that in the helper.
            names = os.listdir(tempd)
            wheel_path = get_legacy_build_wheel_path(
                names=names,
                temp_dir=tempd,
                req=req,
                command_args=wheel_args,
                command_output=output,
            )
            return wheel_path

    def _clean_one(self, req):
        """Run ``setup.py clean --all`` for *req*; return True on success."""
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir)
            return True
        except Exception:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        session,  # type: PipSession
        autobuilding=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param requirements: the requirements to consider building.
        :param session: network session, used to unpack a freshly built
            wheel back into the requirement's source dir (autobuilding only).
        :param autobuilding: True when wheels are being built implicitly
            as part of an install, rather than by an explicit ``pip wheel``.
        :return: the list of InstallRequirement objects that failed to build.
        """
        buildset = []
        format_control = self.finder.format_control
        # Whether a cache directory is available for autobuilding=True.
        cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

        # Decide, per requirement, whether to build and whether to use the
        # ephemeral cache (None means: do not build this one).
        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req, format_control=format_control, autobuilding=autobuilding,
                cache_available=cache_available,
            )
            if ephem_cache is None:
                continue

            buildset.append((req, ephem_cache))

        if not buildset:
            return []

        # Is any wheel build not using the ephemeral cache?
        if any(not ephem_cache for _, ephem_cache in buildset):
            have_directory_for_build = self._wheel_dir or (
                autobuilding and self.wheel_cache.cache_dir
            )
            assert have_directory_for_build

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure