Mercurial > repos > shellac > sam_consensus_v3
comparison env/lib/python3.9/site-packages/pkg_resources/__init__.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author | shellac |
---|---|
date | Mon, 22 Mar 2021 18:12:50 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:4f3585e2f14b |
---|---|
1 """ | |
2 Package resource API | |
3 -------------------- | |
4 | |
5 A resource is a logical file contained within a package, or a logical | |
6 subdirectory thereof. The package resource API expects resource names | |
7 to have their path parts separated with ``/``, *not* whatever the local | |
8 path separator is. Do not use os.path operations to manipulate resource | |
9 names being passed into the API. | |
10 | |
11 The package resource API is designed to work with normal filesystem packages, | |
12 .egg files, and unpacked .egg files. It can also work in a limited way with | |
13 .zip files and with custom PEP 302 loaders that support the ``get_data()`` | |
14 method. | |
15 """ | |
16 | |
17 import sys | |
18 import os | |
19 import io | |
20 import time | |
21 import re | |
22 import types | |
23 import zipfile | |
24 import zipimport | |
25 import warnings | |
26 import stat | |
27 import functools | |
28 import pkgutil | |
29 import operator | |
30 import platform | |
31 import collections | |
32 import plistlib | |
33 import email.parser | |
34 import errno | |
35 import tempfile | |
36 import textwrap | |
37 import itertools | |
38 import inspect | |
39 import ntpath | |
40 import posixpath | |
41 import importlib | |
42 from pkgutil import get_importer | |
43 | |
44 try: | |
45 import _imp | |
46 except ImportError: | |
47 # Python 3.2 compatibility | |
48 import imp as _imp | |
49 | |
50 try: | |
51 FileExistsError | |
52 except NameError: | |
53 FileExistsError = OSError | |
54 | |
55 # capture these to bypass sandboxing | |
56 from os import utime | |
57 try: | |
58 from os import mkdir, rename, unlink | |
59 WRITE_SUPPORT = True | |
60 except ImportError: | |
61 # no write support, probably under GAE | |
62 WRITE_SUPPORT = False | |
63 | |
64 from os import open as os_open | |
65 from os.path import isdir, split | |
66 | |
67 try: | |
68 import importlib.machinery as importlib_machinery | |
69 # access attribute to force import under delayed import mechanisms. | |
70 importlib_machinery.__name__ | |
71 except ImportError: | |
72 importlib_machinery = None | |
73 | |
74 from pkg_resources.extern import appdirs | |
75 from pkg_resources.extern import packaging | |
76 __import__('pkg_resources.extern.packaging.version') | |
77 __import__('pkg_resources.extern.packaging.specifiers') | |
78 __import__('pkg_resources.extern.packaging.requirements') | |
79 __import__('pkg_resources.extern.packaging.markers') | |
80 | |
if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")

# declare some globals that will be defined later to
# satisfy the linters.
# NOTE(review): these are presumably rebound during module initialization
# further down the file — confirm against the rest of the module.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None
104 | |
class PEP440Warning(RuntimeWarning):
    """
    Warning category raised when a version or specifier does not comply
    with PEP 440.
    """
111 | |
def parse_version(v):
    """Parse ``v`` into a PEP 440 ``Version``.

    Strings that do not conform to PEP 440 fall back to ``LegacyVersion``
    so that arbitrary version strings remain comparable.
    """
    version_type = packaging.version.Version
    try:
        return version_type(v)
    except packaging.version.InvalidVersion:
        return packaging.version.LegacyVersion(v)
118 | |
# Maps a module-state variable name to a type tag (e.g. 'dict', 'object')
# used by __getstate__/__setstate__ to select a _sget_*/_sset_* helper.
_state_vars = {}
121 | |
def _declare_state(vartype, **kw):
    """Define module-level state variables and record their type tag.

    Each keyword becomes a module global; ``vartype`` tells the
    snapshot/restore machinery which helper pair to use for it.
    """
    module_ns = globals()
    module_ns.update(kw)
    for name in kw:
        _state_vars[name] = vartype
126 | |
def __getstate__():
    """Snapshot every registered state variable into a plain dict."""
    module_ns = globals()
    return {
        name: module_ns['_sget_' + tag](module_ns[name])
        for name, tag in _state_vars.items()
    }
134 | |
def __setstate__(state):
    """Restore state variables from a snapshot produced by __getstate__."""
    module_ns = globals()
    for name, value in state.items():
        setter = module_ns['_sset_' + _state_vars[name]]
        setter(name, module_ns[name], value)
    return state
141 | |
142 def _sget_dict(val): | |
143 return val.copy() | |
144 | |
145 | |
146 def _sset_dict(key, ob, state): | |
147 ob.clear() | |
148 ob.update(state) | |
149 | |
150 | |
151 def _sget_object(val): | |
152 return val.__getstate__() | |
153 | |
154 | |
155 def _sset_object(key, ob, state): | |
156 ob.__setstate__(state) | |
157 | |
158 | |
159 _sget_none = _sset_none = lambda *args: None | |
160 | |
161 | |
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS required to *use* extensions produced by distutils, but for
    compatibility checks we want the version of macOS actually *running*,
    so that packages requiring a newer macOS are still usable.

    If this situation arises on another platform with a versioned
    platform string, extend this function accordingly.
    """
    plat = get_build_platform()
    match = macosVersionString.match(plat)
    if match is None or sys.platform != "darwin":
        return plat
    try:
        running = '.'.join(_macos_vers()[:2])
        plat = 'macosx-%s-%s' % (running, match.group(3))
    except ValueError:
        # not macOS after all; keep the build platform string
        pass
    return plat
185 | |
# Public API of pkg_resources; names exported by ``from pkg_resources import *``.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Warnings
    'PkgResourcesDeprecationWarning',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
234 | |
235 | |
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. "ResolutionError('why',)" -- class name plus constructor args
        return '{}{!r}'.format(self.__class__.__name__, self.args)
241 | |
242 | |
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # the installed Distribution (first constructor argument)
        return self.args[0]

    @property
    def req(self):
        # the requested Requirement (second constructor argument)
        return self.args[1]

    def report(self):
        # _template interpolates {self.*}; locals() supplies 'self'
        return self._template.format(**locals())

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*self.args, required_by)
        return self
273 | |
274 | |
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict carrying, as a third argument, the set of
    requirements that pulled in the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # the set of requirers (third constructor argument)
        return self.args[2]
286 | |
287 | |
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the unmet Requirement (first constructor argument)
        return self.args[0]

    @property
    def requirers(self):
        # who required it (second constructor argument), possibly None
        return self.args[1]

    @property
    def requirers_str(self):
        # human-readable requirer list for the error message
        return ', '.join(self.requirers) if self.requirers else 'the application'

    def report(self):
        # _template interpolates {self.*}; locals() supplies 'self'
        return self._template.format(**locals())

    def __str__(self):
        return self.report()
313 | |
314 | |
class UnknownExtra(ResolutionError):
    """Raised when a distribution lacks the requested "extra feature"."""
317 | |
318 | |
# Maps a loader type/class to a provider factory; populated via
# register_loader_type().
_provider_factories = {}

# 'major.minor' of the running interpreter, e.g. '3.9'.
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants: when several distributions of the
# same project are available, higher values are preferred.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
327 | |
328 | |
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.

    Registering the same loader type twice replaces the earlier factory.
    """
    _provider_factories[loader_type] = provider_factory
337 | |
338 | |
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # resolve via the working set, activating the distribution if needed
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # otherwise treat the argument as a module name; import it on demand
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)
350 | |
351 | |
def _macos_vers(_cache=[]):
    # NOTE: the mutable default argument is deliberate -- it serves as a
    # per-process cache so the version is computed only once.
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                # plistlib.readPlist was removed in Python 3.9; the hasattr
                # guard makes this a best-effort legacy-only fallback.
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']

        # cached as a list of version components, e.g. ['10', '15', '7']
        _cache.append(version.split('.'))
    return _cache[0]
366 | |
367 | |
368 def _macos_arch(machine): | |
369 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) | |
370 | |
371 | |
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``sysconfig.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    # darwin without a macosx- platform string: synthesize one
    try:
        version = _macos_vers()
        machine = os.uname()[4].replace(" ", "_")
        plat = "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]),
            _macos_arch(machine),
        )
    except ValueError:
        # a non-Mac darwin system: fall through to the default value
        pass
    return plat
394 | |
395 | |
# Patterns for platform strings such as 'macosx-10.15-x86_64' and the
# pre-setuptools-0.6 'darwin-8.0.1-Power_Macintosh' designation.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
400 | |
401 | |
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # trivial cases: unspecified or identical platforms
    if provided is None or required is None or provided == required:
        return True

    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    # macOS special cases
    provMac = macosVersionString.match(provided)
    if not provMac:
        # Not a new-style Mac package; accept pre-setuptools-0.6 packages
        # that used the legacy darwin-* designation.
        provDarwin = darwinVersionString.match(provided)
        if provDarwin:
            dversion = int(provDarwin.group(1))
            macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
            if (dversion == 7 and macosversion >= "10.3"
                    or dversion == 8 and macosversion >= "10.4"):
                return True
        # egg isn't macOS or legacy darwin
        return False

    # same major version and machine type required
    if provMac.group(1) != reqMac.group(1):
        return False
    if provMac.group(3) != reqMac.group(3):
        return False

    # the required OS minor update must be >= the provided one
    if int(provMac.group(2)) > int(reqMac.group(2)):
        return False

    return True
446 | |
447 | |
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Grab the *caller's* global namespace and reset it (keeping only
    # __name__) so the script executes as if it were the main program.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
455 | |
456 | |
# backward compatibility alias for run_script
run_main = run_script
459 | |
460 | |
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # progressively coerce: str -> Requirement -> Distribution
    resolved = dist
    if isinstance(resolved, str):
        resolved = Requirement.parse(resolved)
    if isinstance(resolved, Requirement):
        resolved = get_provider(resolved)
    if not isinstance(resolved, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return resolved
470 | |
471 | |
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
475 | |
476 | |
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
480 | |
481 | |
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
485 | |
486 | |
class IMetadataProvider:
    """Interface for access to a distribution's metadata.

    Methods are declared without ``self`` because this class only
    documents the protocol; it is not meant to be instantiated.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
508 | |
509 | |
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
536 | |
537 | |
class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries in order; may contain duplicates (mirrors sys.path)
        self.entries = []
        # entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> the active Distribution for that project
        self.by_key = {}
        # callables invoked each time a distribution is added
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path defaults conflict; rebuild from the requirements alone
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: entry was already appended above
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # reset the *caller's* globals (keeping __name__) so the script runs
        # as if it were the main program
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the key under both the given entry and the dist's location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers of the newly activated distribution
        self._added_new(dist)

    # FIXME: 'WorkingSet.resolve' is too complex (11)
    def resolve(self, requirements, env=None, installer=None,  # noqa: C901
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # resolve against a shadow copy so this working set is not mutated
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # invoke every subscriber for a newly added distribution
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # shallow-copy all mutable state so unpickling can't alias this set
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
923 | |
924 | |
925 class _ReqExtras(dict): | |
926 """ | |
927 Map each requirement to the extras that demanded it. | |
928 """ | |
929 | |
930 def markers_pass(self, req, extras=None): | |
931 """ | |
932 Evaluate markers for req against each extra that | |
933 demanded it. | |
934 | |
935 Return False if the req has a marker and fails | |
936 evaluation. Otherwise, return True. | |
937 """ | |
938 extra_evals = ( | |
939 req.marker.evaluate({'extra': extra}) | |
940 for extra in self.get(req, ()) + (extras or (None,)) | |
941 ) | |
942 return not req.marker or any(extra_evals) | |
943 | |
944 | |
class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the
        environment.  `search_path` should be a sequence of ``sys.path``
        items; it defaults to ``sys.path`` itself.

        `platform` names the platform that platform-specific distributions
        must be compatible with (default: the running platform).  `python`
        names the desired Python version, e.g. ``'3.6'`` (default: the
        running version).  Explicitly pass ``None`` for either to map
        *all* distributions, regardless of platform or Python version.
        """
        self._distmap = {}
        self.platform = platform
        self.python = python
        # Populate the snapshot immediately.
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        True only when `dist` satisfies both the Python-version and the
        platform constraints this environment was created with.
        """
        if (self.python is not None
                and dist.py_version is not None
                and dist.py_version != self.python):
            return False
        return compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        entries = sys.path if search_path is None else search_path
        for entry in entries:
            for dist in find_distributions(entry):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Lookup is case-insensitive: all of a project's distributions are
        assumed to be keyed by the project name lowercased.
        """
        return self._distmap.get(project_name.lower(), [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if not (self.can_add(dist) and dist.has_version()):
            return
        dists = self._distmap.setdefault(dist.key, [])
        if dist in dists:
            return
        dists.append(dist)
        # Keep newest-first ordering for __getitem__ / best_match.
        dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        First asks ``working_set.find(req)`` whether a suitable
        distribution is already active (which may raise
        ``VersionConflict`` unless `replace_conflicting` is set).  Failing
        that, returns the newest distribution in this environment that
        satisfies `req`; failing that too, falls back to
        ``obtain(req, installer)``.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        # Not active: the newest snapshot entry matching the requirement
        # wins (entries are kept newest-first by add()).
        for candidate in self[req.key]:
            if candidate in req:
                return candidate
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Hook for subclasses to try other acquisition strategies; the base
        implementation simply delegates to `installer`, returning None
        when no installer was supplied.
        """
        if installer is None:
            return None
        return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap:
            # Skip keys whose distribution lists have been emptied.
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unconstrained environment so nothing is dropped.
        new = self.__class__([], platform=None, python=None)
        new += self
        new += other
        return new
1089 | |
1090 | |
1091 # XXX backward compatibility | |
1092 AvailableDistributions = Environment | |
1093 | |
1094 | |
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Instances carry three extra attributes (set by the raising
    resource manager):

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1109 | |
1110 | |
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    # A class-level default shared until set_extraction_path() is called.
    extraction_path = None

    def __init__(self):
        # Maps each extraction target path handed out by get_cache_path()
        # to 1, so cleanup/consistency checks know what was generated.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Wrap whatever exception is currently being handled in an
        # ExtractionError carrying a user-actionable message.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        # locals() supplies {old_exc} and {cache_path} to the template.
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            # Sandbox-bypassing mkdir -p of the parent directory.
            _bypass_ensure_directory(target_path)
        except Exception:
            # Converts the pending exception into an ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        # Record the handed-out path for later cleanup bookkeeping.
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        # World- or group-writable extraction dir => tampering risk.
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "Extraction path is writable by group/others "
                "and vulnerable to attack when "
                "used with get_resource_filename ({path}). "
                "Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)."
            ).format(**locals())
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            # (add r-x for all, then mask to permission bits only).
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): unimplemented stub upstream — always returns None.
1290 | |
1291 | |
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    configured = os.environ.get('PYTHON_EGG_CACHE')
    if configured:
        return configured
    # No (non-empty) override set: fall back to the per-user cache dir.
    return appdirs.user_cache_dir(appname='Python-Eggs')
1302 | |
1303 | |
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_runs = re.compile(r'[^A-Za-z0-9.]+')
    return illegal_runs.sub('-', name)
1310 | |
1311 | |
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Preferred path: PEP 440 normalization.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Not PEP 440: fall back to a lossy dash-normalized form.
        cleaned = version.replace(' ', '.')
        return re.sub(r'[^A-Za-z0-9.]+', '-', cleaned)
1322 | |
1323 | |
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub(r'[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()
1331 | |
1332 | |
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.translate(str.maketrans('-', '_'))
1339 | |
1340 | |
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location info that is meaningless for a marker string.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
1353 | |
1354 | |
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as exc:
        # Present parse failures uniformly as SyntaxError.
        raise SyntaxError(exc) from exc
1368 | |
1369 | |
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Per-instance overrides are set by subclasses (e.g. EggProvider);
    # None/falsy means "no egg metadata available".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        # Resources are resolved relative to the module's directory.
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes so callers always get a file-like object.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        # Despite the name, returns bytes (whatever _get produces).
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # Historical quirk: returns the falsy egg_info value itself
            # (None or ''), not False.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        # Short-circuits to the falsy egg_info when no metadata exists.
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the named metadata script in `namespace`."""
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists on the real filesystem: compile from disk so
            # tracebacks reference the actual file.
            with open(script_filename) as fid:
                source = fid.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # Script only exists inside a zip: seed linecache so that
            # tracebacks can still display source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Resource names always use '/' separators (see module docstring);
        # translate to the local separator here.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1559 | |
1560 | |
# Default fallback: any loader type without a more specific
# registration is handled by NullProvider.
register_loader_type(object, NullProvider)
1562 | |
1563 | |
1564 def _parents(path): | |
1565 """ | |
1566 yield all parents of path including path | |
1567 """ | |
1568 last = None | |
1569 while path != last: | |
1570 yield path | |
1571 last = path | |
1572 path, _ = os.path.split(path) | |
1573 | |
1574 | |
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Assume that metadata may be nested inside a "basket" of
        # multiple eggs; find the nearest egg-looking ancestor of
        # module_path rather than relying on .archive.
        egg = next(
            (p for p in _parents(self.module_path) if _is_egg_path(p)),
            None,
        )
        if egg:
            self._set_egg(egg)

    def _set_egg(self, path):
        self.egg_root = path
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
1593 | |
1594 | |
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Binary mode: resources are byte streams, not text.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as handle:
            return handle.read()

    @classmethod
    def _register(cls):
        # Both source (.py) and sourceless (.pyc) loaders serve plain
        # filesystem packages; missing names map to type(None), which
        # never matches a real loader.
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)
1620 | |
1621 | |
# Hook the filesystem provider up to the standard import-machinery
# loader types at module import time.
DefaultProvider._register()
1623 | |
1624 | |
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    # A non-existent resource is also not a directory.
    _isdir = _has

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []
1640 | |
1641 | |
# Shared singleton used wherever a "no resources available" provider
# is needed.
empty_provider = EmptyProvider()
1643 | |
1644 | |
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    load = build
1670 | |
1671 | |
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    # Cache entry: the parsed manifest plus the archive mtime it reflects.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            # First sight of this archive, or it changed on disk: rebuild.
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached
        return cached.manifest
1690 | |
1691 | |
1692 class ZipProvider(EggProvider): | |
1693 """Resource support for zips and eggs""" | |
1694 | |
1695 eagers = None | |
1696 _zip_manifests = MemoizedZipManifests() | |
1697 | |
    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Pre-compute "<archive path><sep>" so virtual filesystem paths
        # can be mapped to zipfile subpaths with a simple prefix strip.
        self.zip_pre = self.loader.archive + os.sep
1701 | |
1702 def _zipinfo_name(self, fspath): | |
1703 # Convert a virtual filename (full path to file) into a zipfile subpath | |
1704 # usable with the zipimport directory cache for our target archive | |
1705 fspath = fspath.rstrip(os.sep) | |
1706 if fspath == self.loader.archive: | |
1707 return '' | |
1708 if fspath.startswith(self.zip_pre): | |
1709 return fspath[len(self.zip_pre):] | |
1710 raise AssertionError( | |
1711 "%s is not a subpath of %s" % (fspath, self.zip_pre) | |
1712 ) | |
1713 | |
1714 def _parts(self, zip_path): | |
1715 # Convert a zipfile subpath into an egg-relative path part list. | |
1716 # pseudo-fs path | |
1717 fspath = self.zip_pre + zip_path | |
1718 if fspath.startswith(self.egg_root + os.sep): | |
1719 return fspath[len(self.egg_root) + 1:].split(os.sep) | |
1720 raise AssertionError( | |
1721 "%s is not a subpath of %s" % (fspath, self.egg_root) | |
1722 ) | |
1723 | |
    @property
    def zipinfo(self):
        # Manifest of ZipInfo objects for this archive, memoized (keyed by
        # normalized path and mtime) in the shared class-level cache.
        return self._zip_manifests.load(self.loader.archive)
1727 | |
1728 def get_resource_filename(self, manager, resource_name): | |
1729 if not self.egg_name: | |
1730 raise NotImplementedError( | |
1731 "resource_filename() only supported for .egg, not .zip" | |
1732 ) | |
1733 # no need to lock for extraction, since we use temp names | |
1734 zip_path = self._resource_to_zip(resource_name) | |
1735 eagers = self._get_eager_resources() | |
1736 if '/'.join(self._parts(zip_path)) in eagers: | |
1737 for name in eagers: | |
1738 self._extract_resource(manager, self._eager_to_zip(name)) | |
1739 return self._extract_resource(manager, zip_path) | |
1740 | |
1741 @staticmethod | |
1742 def _get_date_and_size(zip_stat): | |
1743 size = zip_stat.file_size | |
1744 # ymdhms+wday, yday, dst | |
1745 date_time = zip_stat.date_time + (0, 0, -1) | |
1746 # 1980 offset already done | |
1747 timestamp = time.mktime(date_time) | |
1748 return timestamp, size | |
1749 | |
    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    def _extract_resource(self, manager, zip_path):  # noqa: C901

        # Directory entry: extract every child recursively, then report
        # the directory that contains the last one.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            # Fast path: an up-to-date copy is already in the cache.
            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name in the target directory, then rename
            # into place, so concurrent extractors never see partial files.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Stamp the zip entry's mtime so _is_current can compare it.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path
1805 | |
1806 def _is_current(self, file_path, zip_path): | |
1807 """ | |
1808 Return True if the file_path is current for this zip_path | |
1809 """ | |
1810 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) | |
1811 if not os.path.isfile(file_path): | |
1812 return False | |
1813 stat = os.stat(file_path) | |
1814 if stat.st_size != size or stat.st_mtime != timestamp: | |
1815 return False | |
1816 # check that the contents match | |
1817 zip_contents = self.loader.get_data(zip_path) | |
1818 with open(file_path, 'rb') as f: | |
1819 file_contents = f.read() | |
1820 return zip_contents == file_contents | |
1821 | |
1822 def _get_eager_resources(self): | |
1823 if self.eagers is None: | |
1824 eagers = [] | |
1825 for name in ('native_libs.txt', 'eager_resources.txt'): | |
1826 if self.has_metadata(name): | |
1827 eagers.extend(self.get_metadata_lines(name)) | |
1828 self.eagers = eagers | |
1829 return self.eagers | |
1830 | |
    def _index(self):
        """Return (building once, then caching in self._dirindex) a mapping
        of directory path -> list of child names for every entry in the
        archive, with paths joined by os.sep.
        """
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                # walk up the ancestry; stop at the first parent already
                # known, since its own ancestors were recorded previously
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind
1847 | |
1848 def _has(self, fspath): | |
1849 zip_path = self._zipinfo_name(fspath) | |
1850 return zip_path in self.zipinfo or zip_path in self._index() | |
1851 | |
1852 def _isdir(self, fspath): | |
1853 return self._zipinfo_name(fspath) in self._index() | |
1854 | |
1855 def _listdir(self, fspath): | |
1856 return list(self._index().get(self._zipinfo_name(fspath), ())) | |
1857 | |
1858 def _eager_to_zip(self, resource_name): | |
1859 return self._zipinfo_name(self._fn(self.egg_root, resource_name)) | |
1860 | |
1861 def _resource_to_zip(self, resource_name): | |
1862 return self._zipinfo_name(self._fn(self.module_path, resource_name)) | |
1863 | |
1864 | |
# Serve resources and metadata for zipimported eggs through ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)
1866 | |
1867 | |
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    Only the ``PKG-INFO`` metadata name is recognized; all other data and
    metadata requests are rejected. The metadata content is whatever the
    file at the supplied location contains.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name maps onto the single backing file.
        return self.path

    def has_metadata(self, name):
        if name != 'PKG-INFO':
            return False
        return os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # U+FFFD marks bytes that failed UTF-8 decoding in get_metadata().
        replacement_char = '�'
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(**locals())
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1907 | |
1908 | |
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the sys.path entry the distribution lives on;
        # `egg_info` is the directory holding its metadata files.
        self.module_path = path
        self.egg_info = egg_info
1932 | |
1933 | |
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # An importer prefix means the egg lives inside a subdirectory of
        # the archive; point module_path at that subdirectory.
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()
1947 | |
1948 | |
# Registry of distribution finders, keyed by importer type; registered with
# pkg_resources' save/restore state machinery.
_declare_state('dict', _distribution_finders={})
1950 | |
1951 | |
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
1960 | |
1961 | |
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # pick the registered finder whose importer type matches this importer
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1967 | |
1968 | |
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the zip itself is an egg distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            # an egg nested inside this zip: recurse with a new zipimporter
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
            # a bare metadata directory inside the zip
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
1994 | |
1995 | |
# Zipped path entries are scanned for (possibly nested) eggs.
register_finder(zipimport.zipimporter, find_eggs_in_zip)
1997 | |
1998 | |
def find_nothing(importer, path_item, only=False):
    """Fallback finder that yields no distributions, used for importer
    types with no specific handler registered."""
    return tuple()
2001 | |
2002 | |
# Default for any importer type without a more specific finder.
register_finder(object, find_nothing)
2004 | |
2005 | |
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each component of the filename
        """
        # Split "Name-1.2.3-py3.9.egg" on dashes and append the extension;
        # every piece is run through packaging's version parser so the
        # resulting lists are comparable for sorting.
        name, ext = os.path.splitext(name)
        parts = itertools.chain(name.split('-'), [ext])
        return [packaging.version.parse(part) for part in parts]

    return sorted(names, key=_by_version, reverse=True)
2030 | |
2031 | |
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # the path item itself is an unpacked egg; no directory scan needed
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = (
        os.path.join(path_item, child)
        for child in safe_listdir(path_item)
    )

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        # NOTE(review): `entry` was already joined with path_item above;
        # this second join relies on path_item being absolute (realpath
        # via _normalize_cached) so the join returns `entry` unchanged.
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
2065 | |
2066 | |
def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    is_egg_info = lower.endswith('.egg-info')
    is_dist_info = (
        lower.endswith('.dist-info')
        and os.path.isdir(os.path.join(path_item, entry))
    )
    # Metadata directories/files take precedence; eggs and egg-links are
    # only considered when nested distributions are wanted (`only` false).
    if is_egg_info or is_dist_info:
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # falsy sentinel: callable that yields nothing
    return NoDists()
2085 | |
2086 | |
class NoDists:
    """Falsy callable placeholder: yields no distributions.

    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __bool__(self):
        # falsy so dist_factory results can be truth-tested for screening
        return False

    def __call__(self, fullpath):
        # an empty iterator, regardless of input
        return iter(())
2100 | |
2101 | |
def safe_listdir(path):
    """Return ``os.listdir(path)``, or ``()`` when the path is missing,
    not a directory, or unreadable due to permissions; any other OSError
    propagates."""
    ignorable_errnos = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        return ()
    except OSError as exc:
        if exc.errno in ignorable_errnos:
            return ()
        raise
2116 | |
2117 | |
def distributions_from_metadata(path):
    """Yield a develop-precedence Distribution for the metadata at `path`
    (an .egg-info/.dist-info directory, or a single metadata file).
    Empty metadata directories yield nothing.
    """
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
2131 | |
2132 | |
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        yield from (line for line in map(str.strip, f) if line)
2142 | |
2143 | |
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    resolved_paths = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    # Only the first referenced path (if any) is consulted; an empty
    # egg-link resolves to an empty tuple.
    return next(map(find_distributions, resolved_paths), ())
2156 | |
2157 | |
# Plain filesystem path entries are scanned by find_on_path, for both the
# legacy ImpImporter and the modern FileFinder importer types.
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package support, keyed by importer type and by
# package name respectively; hooked into the save/restore state machinery.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2165 | |
2166 | |
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type replace earlier ones.
    _namespace_handlers[importer_type] = namespace_handler
2183 | |
2184 | |
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath added to the package's __path__, or None when the
    path item has no importer/loader for the package.
    """

    importer = get_importer(path_item)
    if importer is None:
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        loader = importer.find_spec(packageName).loader
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create a stub package module and attach it to its parent
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        # importing re-executes loader machinery so the package picks up
        # the extended __path__; then reorder to match sys.path
        importlib.import_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
2218 | |
2219 | |
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # entries not found on sys.path sort last
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        # drop the package's own trailing directories to recover the
        # sys.path entry this __path__ element came from
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    if isinstance(module.__path__, list):
        # mutate in place so existing references see the new ordering
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path
2252 | |
2253 | |
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # the import lock guards the shared _namespace_packages registry and
    # the recursive parent declarations below
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # ensure every ancestor is declared first; child path items
            # are derived from the parent package's __path__
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2286 | |
2287 | |
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # recurse so grandchild namespace packages see the new
                # subpath as one of their own path items
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
2298 | |
2299 | |
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(entry) == normalized
        for entry in module.__path__
    )
    # Only return the path if it's not already there (None otherwise)
    if not already_present:
        return subpath
2311 | |
2312 | |
# Filesystem and zipfile importers share the same subpath computation.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2318 | |
2319 | |
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importer types that contribute no subpath."""
    return None
2322 | |
2323 | |
# Default for any importer type without a more specific namespace handler.
register_namespace_handler(object, null_ns_handler)
2325 | |
2326 | |
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    resolved = os.path.realpath(os.path.normpath(patched))
    return os.path.normcase(resolved)
2331 | |
2332 | |
2333 def _cygwin_patch(filename): # pragma: nocover | |
2334 """ | |
2335 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains | |
2336 symlink components. Using | |
2337 os.path.abspath() works around this limitation. A fix in os.getcwd() | |
2338 would probably better, in Cygwin even more so, except | |
2339 that this seems to be by design... | |
2340 """ | |
2341 return os.path.abspath(filename) if sys.platform == 'cygwin' else filename | |
2342 | |
2343 | |
def _normalize_cached(filename, _cache={}):
    # Memoized normalize_path(). NOTE: the mutable default argument is
    # intentional — it is a per-process cache shared across all calls.
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
2350 | |
2351 | |
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    if _is_zip_egg(path):
        return True
    return _is_unpacked_egg(path)
2357 | |
2358 | |
2359 def _is_zip_egg(path): | |
2360 return ( | |
2361 path.lower().endswith('.egg') and | |
2362 os.path.isfile(path) and | |
2363 zipfile.is_zipfile(path) | |
2364 ) | |
2365 | |
2366 | |
2367 def _is_unpacked_egg(path): | |
2368 """ | |
2369 Determine if given path appears to be an unpacked egg. | |
2370 """ | |
2371 return ( | |
2372 path.lower().endswith('.egg') and | |
2373 os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) | |
2374 ) | |
2375 | |
2376 | |
2377 def _set_parent_ns(packageName): | |
2378 parts = packageName.split('.') | |
2379 name = parts.pop() | |
2380 if parts: | |
2381 parent = '.'.join(parts) | |
2382 setattr(sys.modules[parent], name, sys.modules[packageName]) | |
2383 | |
2384 | |
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, str):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # a sequence of strings (or of nested sequences): recurse
        for item in strs:
            yield from yield_lines(item)
2397 | |
2398 | |
# Matches a dotted module/group name, e.g. "pkg.sub.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg basenames of the form "Name-version-pyX.Y-platform"; every
# component after the name is optional.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2412 | |
2413 | |
class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        # `attrs` is the dotted attribute path inside the module; `extras`
        # names the extras whose dependencies must be installed by require().
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render back to the "name = module:attrs [extras]" source syntax.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        # Passing require=False or extra arguments is deprecated; callers
        # should use .resolve() and .require() directly.
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # walk the dotted attribute path, e.g. attrs=('A', 'b') -> A.b
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc)) from exc

    def require(self, env=None, installer=None):
        # Install/activate this entry point's dependencies (with extras)
        # into the global working set.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for one entry-point line: "name = module[:attrs] [extras]".
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Validate "[extra1,extra2]" by parsing it as a Requirement on a
        # dummy project name; version specifiers are not allowed here.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # `data` is either {group: lines} or INI-style text with [group]
        # section headers (handled by split_sections)
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2544 | |
2545 | |
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (
        line for line in lines if line.lower().startswith('version:')
    )
    first = next(version_lines, '')
    _, _, value = first.partition(':')
    return safe_version(value.strip()) or None
2557 | |
2558 | |
2559 class Distribution: | |
2560 """Wrap an actual or potential sys.path entry w/metadata""" | |
2561 PKG_INFO = 'PKG-INFO' | |
2562 | |
2563 def __init__( | |
2564 self, location=None, metadata=None, project_name=None, | |
2565 version=None, py_version=PY_MAJOR, platform=None, | |
2566 precedence=EGG_DIST): | |
2567 self.project_name = safe_name(project_name or 'Unknown') | |
2568 if version is not None: | |
2569 self._version = safe_version(version) | |
2570 self.py_version = py_version | |
2571 self.platform = platform | |
2572 self.location = location | |
2573 self.precedence = precedence | |
2574 self._provider = metadata or empty_provider | |
2575 | |
2576 @classmethod | |
2577 def from_location(cls, location, basename, metadata=None, **kw): | |
2578 project_name, version, py_version, platform = [None] * 4 | |
2579 basename, ext = os.path.splitext(basename) | |
2580 if ext.lower() in _distributionImpl: | |
2581 cls = _distributionImpl[ext.lower()] | |
2582 | |
2583 match = EGG_NAME(basename) | |
2584 if match: | |
2585 project_name, version, py_version, platform = match.group( | |
2586 'name', 'ver', 'pyver', 'plat' | |
2587 ) | |
2588 return cls( | |
2589 location, metadata, project_name=project_name, version=version, | |
2590 py_version=py_version, platform=platform, **kw | |
2591 )._reload_version() | |
2592 | |
2593 def _reload_version(self): | |
2594 return self | |
2595 | |
2596 @property | |
2597 def hashcmp(self): | |
2598 return ( | |
2599 self.parsed_version, | |
2600 self.precedence, | |
2601 self.key, | |
2602 self.location, | |
2603 self.py_version or '', | |
2604 self.platform or '', | |
2605 ) | |
2606 | |
2607 def __hash__(self): | |
2608 return hash(self.hashcmp) | |
2609 | |
2610 def __lt__(self, other): | |
2611 return self.hashcmp < other.hashcmp | |
2612 | |
2613 def __le__(self, other): | |
2614 return self.hashcmp <= other.hashcmp | |
2615 | |
2616 def __gt__(self, other): | |
2617 return self.hashcmp > other.hashcmp | |
2618 | |
2619 def __ge__(self, other): | |
2620 return self.hashcmp >= other.hashcmp | |
2621 | |
2622 def __eq__(self, other): | |
2623 if not isinstance(other, self.__class__): | |
2624 # It's not a Distribution, so they are not equal | |
2625 return False | |
2626 return self.hashcmp == other.hashcmp | |
2627 | |
2628 def __ne__(self, other): | |
2629 return not self == other | |
2630 | |
2631 # These properties have to be lazy so that we don't have to load any | |
2632 # metadata until/unless it's actually needed. (i.e., some distributions | |
2633 # may not know their name or version without loading PKG-INFO) | |
2634 | |
2635 @property | |
2636 def key(self): | |
2637 try: | |
2638 return self._key | |
2639 except AttributeError: | |
2640 self._key = key = self.project_name.lower() | |
2641 return key | |
2642 | |
2643 @property | |
2644 def parsed_version(self): | |
2645 if not hasattr(self, "_parsed_version"): | |
2646 self._parsed_version = parse_version(self.version) | |
2647 | |
2648 return self._parsed_version | |
2649 | |
2650 def _warn_legacy_version(self): | |
2651 LV = packaging.version.LegacyVersion | |
2652 is_legacy = isinstance(self._parsed_version, LV) | |
2653 if not is_legacy: | |
2654 return | |
2655 | |
2656 # While an empty version is technically a legacy version and | |
2657 # is not a valid PEP 440 version, it's also unlikely to | |
2658 # actually come from someone and instead it is more likely that | |
2659 # it comes from setuptools attempting to parse a filename and | |
2660 # including it in the list. So for that we'll gate this warning | |
2661 # on if the version is anything at all or not. | |
2662 if not self.version: | |
2663 return | |
2664 | |
2665 tmpl = textwrap.dedent(""" | |
2666 '{project_name} ({version})' is being parsed as a legacy, | |
2667 non PEP 440, | |
2668 version. You may find odd behavior and sort order. | |
2669 In particular it will be sorted as less than 0.0. It | |
2670 is recommended to migrate to PEP 440 compatible | |
2671 versions. | |
2672 """).strip().replace('\n', ' ') | |
2673 | |
2674 warnings.warn(tmpl.format(**vars(self)), PEP440Warning) | |
2675 | |
2676 @property | |
2677 def version(self): | |
2678 try: | |
2679 return self._version | |
2680 except AttributeError as e: | |
2681 version = self._get_version() | |
2682 if version is None: | |
2683 path = self._get_metadata_path_for_display(self.PKG_INFO) | |
2684 msg = ( | |
2685 "Missing 'Version:' header and/or {} file at path: {}" | |
2686 ).format(self.PKG_INFO, path) | |
2687 raise ValueError(msg, self) from e | |
2688 | |
2689 return version | |
2690 | |
2691 @property | |
2692 def _dep_map(self): | |
2693 """ | |
2694 A map of extra to its list of (direct) requirements | |
2695 for this distribution, including the null extra. | |
2696 """ | |
2697 try: | |
2698 return self.__dep_map | |
2699 except AttributeError: | |
2700 self.__dep_map = self._filter_extras(self._build_dep_map()) | |
2701 return self.__dep_map | |
2702 | |
2703 @staticmethod | |
2704 def _filter_extras(dm): | |
2705 """ | |
2706 Given a mapping of extras to dependencies, strip off | |
2707 environment markers and filter out any dependencies | |
2708 not matching the markers. | |
2709 """ | |
2710 for extra in list(filter(None, dm)): | |
2711 new_extra = extra | |
2712 reqs = dm.pop(extra) | |
2713 new_extra, _, marker = extra.partition(':') | |
2714 fails_marker = marker and ( | |
2715 invalid_marker(marker) | |
2716 or not evaluate_marker(marker) | |
2717 ) | |
2718 if fails_marker: | |
2719 reqs = [] | |
2720 new_extra = safe_extra(new_extra) or None | |
2721 | |
2722 dm.setdefault(new_extra, []).extend(reqs) | |
2723 return dm | |
2724 | |
2725 def _build_dep_map(self): | |
2726 dm = {} | |
2727 for name in 'requires.txt', 'depends.txt': | |
2728 for extra, reqs in split_sections(self._get_metadata(name)): | |
2729 dm.setdefault(extra, []).extend(parse_requirements(reqs)) | |
2730 return dm | |
2731 | |
2732 def requires(self, extras=()): | |
2733 """List of Requirements needed for this distro if `extras` are used""" | |
2734 dm = self._dep_map | |
2735 deps = [] | |
2736 deps.extend(dm.get(None, ())) | |
2737 for ext in extras: | |
2738 try: | |
2739 deps.extend(dm[safe_extra(ext)]) | |
2740 except KeyError as e: | |
2741 raise UnknownExtra( | |
2742 "%s has no such extra feature %r" % (self, ext) | |
2743 ) from e | |
2744 return deps | |
2745 | |
2746 def _get_metadata_path_for_display(self, name): | |
2747 """ | |
2748 Return the path to the given metadata file, if available. | |
2749 """ | |
2750 try: | |
2751 # We need to access _get_metadata_path() on the provider object | |
2752 # directly rather than through this class's __getattr__() | |
2753 # since _get_metadata_path() is marked private. | |
2754 path = self._provider._get_metadata_path(name) | |
2755 | |
2756 # Handle exceptions e.g. in case the distribution's metadata | |
2757 # provider doesn't support _get_metadata_path(). | |
2758 except Exception: | |
2759 return '[could not detect]' | |
2760 | |
2761 return path | |
2762 | |
2763 def _get_metadata(self, name): | |
2764 if self.has_metadata(name): | |
2765 for line in self.get_metadata_lines(name): | |
2766 yield line | |
2767 | |
2768 def _get_version(self): | |
2769 lines = self._get_metadata(self.PKG_INFO) | |
2770 version = _version_from_file(lines) | |
2771 | |
2772 return version | |
2773 | |
    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Only adjust global import machinery when we actually touched
            # the real sys.path, not some private working-set path list.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    # re-declare already-imported namespace packages so
                    # their __path__ reflects this newly-added location
                    declare_namespace(pkg)
2784 | |
2785 def egg_name(self): | |
2786 """Return what this distribution's standard .egg filename should be""" | |
2787 filename = "%s-%s-py%s" % ( | |
2788 to_filename(self.project_name), to_filename(self.version), | |
2789 self.py_version or PY_MAJOR | |
2790 ) | |
2791 | |
2792 if self.platform: | |
2793 filename += '-' + self.platform | |
2794 return filename | |
2795 | |
2796 def __repr__(self): | |
2797 if self.location: | |
2798 return "%s (%s)" % (self, self.location) | |
2799 else: | |
2800 return str(self) | |
2801 | |
2802 def __str__(self): | |
2803 try: | |
2804 version = getattr(self, 'version', None) | |
2805 except ValueError: | |
2806 version = None | |
2807 version = version or "[unknown version]" | |
2808 return "%s %s" % (self.project_name, version) | |
2809 | |
2810 def __getattr__(self, attr): | |
2811 """Delegate all unrecognized public attributes to .metadata provider""" | |
2812 if attr.startswith('_'): | |
2813 raise AttributeError(attr) | |
2814 return getattr(self._provider, attr) | |
2815 | |
2816 def __dir__(self): | |
2817 return list( | |
2818 set(super(Distribution, self).__dir__()) | |
2819 | set( | |
2820 attr for attr in self._provider.__dir__() | |
2821 if not attr.startswith('_') | |
2822 ) | |
2823 ) | |
2824 | |
2825 @classmethod | |
2826 def from_filename(cls, filename, metadata=None, **kw): | |
2827 return cls.from_location( | |
2828 _normalize_cached(filename), os.path.basename(filename), metadata, | |
2829 **kw | |
2830 ) | |
2831 | |
2832 def as_requirement(self): | |
2833 """Return a ``Requirement`` that matches this distribution exactly""" | |
2834 if isinstance(self.parsed_version, packaging.version.Version): | |
2835 spec = "%s==%s" % (self.project_name, self.parsed_version) | |
2836 else: | |
2837 spec = "%s===%s" % (self.project_name, self.parsed_version) | |
2838 | |
2839 return Requirement.parse(spec) | |
2840 | |
2841 def load_entry_point(self, group, name): | |
2842 """Return the `name` entry point of `group` or raise ImportError""" | |
2843 ep = self.get_entry_info(group, name) | |
2844 if ep is None: | |
2845 raise ImportError("Entry point %r not found" % ((group, name),)) | |
2846 return ep.load() | |
2847 | |
2848 def get_entry_map(self, group=None): | |
2849 """Return the entry point map for `group`, or the full entry map""" | |
2850 try: | |
2851 ep_map = self._ep_map | |
2852 except AttributeError: | |
2853 ep_map = self._ep_map = EntryPoint.parse_map( | |
2854 self._get_metadata('entry_points.txt'), self | |
2855 ) | |
2856 if group is not None: | |
2857 return ep_map.get(group, {}) | |
2858 return ep_map | |
2859 | |
2860 def get_entry_info(self, group, name): | |
2861 """Return the EntryPoint object for `group`+`name`, or ``None``""" | |
2862 return self.get_entry_map(group).get(name) | |
2863 | |
    # FIXME: 'Distribution.insert_on' is too complex (13)
    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # `npath` mirrors `path` entry-for-entry but with normalized
        # strings, so comparisons below are insensitive to case/symlinks;
        # all insertions/deletions are applied to both lists in lockstep.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # loc not present and no parent egg dir found: simple
            # prepend (replace=True) or append (replace=False), done.
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return
2932 | |
    def check_version_conflict(self):
        """Warn if any of this distribution's top-level modules was already
        imported from somewhere other than this distribution's location."""
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                # not imported yet, or a namespace package: no conflict
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                # module was imported from this very distribution: fine
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )
2954 | |
2955 def has_version(self): | |
2956 try: | |
2957 self.version | |
2958 except ValueError: | |
2959 issue_warning("Unbuilt egg for " + repr(self)) | |
2960 return False | |
2961 return True | |
2962 | |
2963 def clone(self, **kw): | |
2964 """Copy this distribution, substituting in any changed keyword args""" | |
2965 names = 'project_name version py_version platform location precedence' | |
2966 for attr in names.split(): | |
2967 kw.setdefault(attr, getattr(self, attr, None)) | |
2968 kw.setdefault('metadata', self._provider) | |
2969 return self.__class__(**kw) | |
2970 | |
2971 @property | |
2972 def extras(self): | |
2973 return [dep for dep in self._dep_map if dep] | |
2974 | |
2975 | |
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over one derived
        from the filename.

        Packages installed by distutils (e.g. numpy or scipy) used an old
        ``safe_version``, so their version numbers can get mangled when
        converted to filenames (e.g. 1.11.0.dev0+2329eae becomes
        1.11.0.dev0_2329eae) and will then not parse properly downstream
        by Distribution and safe_version.  Take the extra step of reading
        the version number from the metadata file itself when possible.
        """
        md_version = self._get_version()
        if md_version:
            self._version = md_version
        return self
2993 | |
2994 | |
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    # .dist-info distributions keep their metadata in METADATA, not PKG-INFO
    PKG_INFO = 'METADATA'
    # NOTE(review): matches a version token between '('/',' and ','/')';
    # not referenced within this part of the module -- verify usage elsewhere.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Name-mangled, so this cache is distinct from the base class's
        # Distribution.__dep_map and uses METADATA instead of requires.txt.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # yield requirements whose marker passes for `extra`
            # (requirements with no marker always apply)
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # store only the extra-specific requirements, minus the common set
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
3043 | |
3044 | |
# Map a metadata directory/file suffix to the Distribution subclass that
# knows how to interpret it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
3050 | |
3051 | |
def issue_warning(*args, **kw):
    """Issue a warning attributed to the first caller outside this module."""
    own_globals = globals()
    stacklevel = 1
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(stacklevel).f_globals is own_globals:
            stacklevel += 1
    except ValueError:
        # walked off the top of the stack; warn from wherever we reached
        pass
    warnings.warn(stacklevel=stacklevel + 1, *args, **kw)
3063 | |
3064 | |
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # NOTE(review): [:-2] removes the backslash *and* the character
            # before it -- presumably assuming a space precedes the
            # backslash; confirm against the metadata writers before changing.
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
3085 | |
3086 | |
class RequirementParseError(packaging.requirements.InvalidRequirement):
    """Compatibility wrapper for InvalidRequirement, kept so existing
    callers that catch the historical pkg_resources name still work."""
3089 | |
3090 | |
class Requirement(packaging.requirements.Requirement):
    """A packaging Requirement extended with pkg_resources' legacy
    attributes (``project_name``, ``key``, ``specs``, ``hashCmp``)."""

    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        # legacy aliases retained for backward compatibility
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # tuple driving __eq__ and __hash__; the marker is folded in via
        # its string form
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        # precomputed hash (name-mangled to _Requirement__hash)
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # A Distribution is "in" the requirement when its key matches and
        # its version satisfies the specifier.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        """Parse exactly one requirement specification from `s`."""
        req, = parse_requirements(s)
        return req
3141 | |
3142 | |
3143 def _always_object(classes): | |
3144 """ | |
3145 Ensure object appears in the mro even | |
3146 for old-style classes. | |
3147 """ | |
3148 if object not in classes: | |
3149 return classes + (object,) | |
3150 return classes | |
3151 | |
3152 | |
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the object's mro (with `object` guaranteed present) and return
    # the first registered adapter; implicitly returns None on no match.
    candidates = _always_object(
        inspect.getmro(getattr(ob, '__class__', type(ob))))
    for candidate in candidates:
        if candidate in registry:
            return registry[candidate]
3159 | |
3160 | |
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    os.makedirs(os.path.dirname(path), exist_ok=True)
3165 | |
3166 | |
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()

    Uses the raw os functions (`split`, `isdir`, `mkdir`) captured at
    import time, so it works even when os.mkdir is sandboxed.
    """
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # create missing ancestors first, depth-first
        _bypass_ensure_directory(dirname)
        try:
            mkdir(dirname, 0o755)
        except FileExistsError:
            # raced with another process/thread creating the same dir
            pass
3178 | |
3179 | |
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous segment before starting a new section
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
3204 | |
3205 | |
def _mkstemp(*args, **kw):
    """tempfile.mkstemp() wrapper that temporarily bypasses the sandbox's
    patched os.open, then restores it."""
    saved_open = os.open
    os.open = os_open  # the raw os.open captured before sandboxing
    try:
        return tempfile.mkstemp(*args, **kw)
    finally:
        # always restore the (possibly sandboxed) os.open
        os.open = saved_open
3215 | |
3216 | |
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.  (Distribution._warn_legacy_version is what emits these warnings.)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3222 | |
3223 | |
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    """Call *f* once with the given arguments and return *f* unchanged.

    Used below as a decorator to run module-initialization functions at
    definition time while keeping the function object bound to its name.
    """
    f(*args, **kwargs)
    return f
3228 | |
3229 | |
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export every public ResourceManager method (resource_filename,
    # resource_stream, ...) as a module-level function
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )
3240 | |
3241 | |
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # module-level conveniences bound to the master working set
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # publish every local name defined above as a module-level global
    globals().update(locals())
3280 | |
3281 | |
class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default (DeprecationWarning is hidden outside __main__).
    """