1"""
2Package resource API
3--------------------
4
5A resource is a logical file contained within a package, or a logical
6subdirectory thereof.  The package resource API expects resource names
7to have their path parts separated with ``/``, *not* whatever the local
8path separator is.  Do not use os.path operations to manipulate resource
9names being passed into the API.
10
11The package resource API is designed to work with normal filesystem packages,
12.egg files, and unpacked .egg files.  It can also work in a limited way with
13.zip files and with custom PEP 302 loaders that support the ``get_data()``
14method.
15"""
16
17import sys
18import os
19import io
20import time
21import re
22import types
23import zipfile
24import zipimport
25import warnings
26import stat
27import functools
28import pkgutil
29import operator
30import platform
31import collections
32import plistlib
33import email.parser
34import errno
35import tempfile
36import textwrap
37import itertools
38import inspect
39import ntpath
40import posixpath
41import importlib
42from pkgutil import get_importer
43
44try:
45    import _imp
46except ImportError:
47    # Python 3.2 compatibility
48    import imp as _imp
49
50try:
51    FileExistsError
52except NameError:
53    FileExistsError = OSError
54
55# capture these to bypass sandboxing
56from os import utime
57try:
58    from os import mkdir, rename, unlink
59    WRITE_SUPPORT = True
60except ImportError:
61    # no write support, probably under GAE
62    WRITE_SUPPORT = False
63
64from os import open as os_open
65from os.path import isdir, split
66
67try:
68    import importlib.machinery as importlib_machinery
69    # access attribute to force import under delayed import mechanisms.
70    importlib_machinery.__name__
71except ImportError:
72    importlib_machinery = None
73
74from pkg_resources.extern.jaraco.text import (
75    yield_lines,
76    drop_comment,
77    join_continuation,
78)
79
80from pkg_resources.extern import appdirs
81from pkg_resources.extern import packaging
82__import__('pkg_resources.extern.packaging.version')
83__import__('pkg_resources.extern.packaging.specifiers')
84__import__('pkg_resources.extern.packaging.requirements')
85__import__('pkg_resources.extern.packaging.markers')
86
87if sys.version_info < (3, 5):
88    raise RuntimeError("Python 3.5 or later is required")
89
90# declare some globals that will be defined later to
91# satisfy the linters.
92require = None
93working_set = None
94add_activation_listener = None
95resources_stream = None
96cleanup_resources = None
97resource_dir = None
98resource_stream = None
99set_extraction_path = None
100resource_isdir = None
101resource_string = None
102iter_entry_points = None
103resource_listdir = None
104resource_filename = None
105resource_exists = None
106_distribution_finders = None
107_namespace_handlers = None
108_namespace_packages = None
109
110
111class PEP440Warning(RuntimeWarning):
112    """
113    Used when there is an issue with a version or specifier not complying with
114    PEP 440.
115    """
116
117
118def parse_version(v):
119    try:
120        return packaging.version.Version(v)
121    except packaging.version.InvalidVersion:
122        warnings.warn(
123            f"{v} is an invalid version and will not be supported in "
124            "a future release",
125            PkgResourcesDeprecationWarning,
126        )
127        return packaging.version.LegacyVersion(v)
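

# Illustrative sketch, not part of the API: parse_version() returns objects
# that compare by PEP 440 rules; invalid versions fall back to LegacyVersion
# after emitting a PkgResourcesDeprecationWarning.  Exposition only, never
# called.
def _example_parse_version():
    assert parse_version('1.10') > parse_version('1.9')
    assert parse_version('2.0.dev1') < parse_version('2.0')
    return parse_version('1.10')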
128
129
130_state_vars = {}
131
132
133def _declare_state(vartype, **kw):
134    globals().update(kw)
135    _state_vars.update(dict.fromkeys(kw, vartype))
136
137
138def __getstate__():
139    state = {}
140    g = globals()
141    for k, v in _state_vars.items():
142        state[k] = g['_sget_' + v](g[k])
143    return state
144
145
146def __setstate__(state):
147    g = globals()
148    for k, v in state.items():
149        g['_sset_' + _state_vars[k]](k, g[k], v)
150    return state
151
152
153def _sget_dict(val):
154    return val.copy()
155
156
157def _sset_dict(key, ob, state):
158    ob.clear()
159    ob.update(state)
160
161
162def _sget_object(val):
163    return val.__getstate__()
164
165
166def _sset_object(key, ob, state):
167    ob.__setstate__(state)
168
169
170_sget_none = _sset_none = lambda *args: None
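

# Illustrative sketch, not part of the API: _declare_state() registers a
# module-level global and records its type, so __getstate__()/__setstate__()
# above can snapshot and restore it via the matching _sget_*/_sset_* helpers.
# The name "_example_registry" is hypothetical.  Exposition only, never
# called.
def _example_declare_state():
    _declare_state('dict', _example_registry={})
    snapshot = __getstate__()   # copies _example_registry via _sget_dict
    __setstate__(snapshot)      # restores it in place via _sset_dict
    return snapshot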
171
172
173def get_supported_platform():
174    """Return this platform's maximum compatible version.
175
176    distutils.util.get_platform() normally reports the minimum version
177    of macOS that would be required to *use* extensions produced by
178    distutils.  But what we want when checking compatibility is to know the
179    version of macOS that we are *running*.  To allow usage of packages that
180    explicitly require a newer version of macOS, we must also know the
181    current version of the OS.
182
183    If this condition occurs for any other platform with a version in its
184    platform strings, this function should be extended accordingly.
185    """
186    plat = get_build_platform()
187    m = macosVersionString.match(plat)
188    if m is not None and sys.platform == "darwin":
189        try:
190            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
191        except ValueError:
192            # not macOS
193            pass
194    return plat
195
196
197__all__ = [
198    # Basic resource access and distribution/entry point discovery
199    'require', 'run_script', 'get_provider', 'get_distribution',
200    'load_entry_point', 'get_entry_map', 'get_entry_info',
201    'iter_entry_points',
202    'resource_string', 'resource_stream', 'resource_filename',
203    'resource_listdir', 'resource_exists', 'resource_isdir',
204
205    # Environmental control
206    'declare_namespace', 'working_set', 'add_activation_listener',
207    'find_distributions', 'set_extraction_path', 'cleanup_resources',
208    'get_default_cache',
209
210    # Primary implementation classes
211    'Environment', 'WorkingSet', 'ResourceManager',
212    'Distribution', 'Requirement', 'EntryPoint',
213
214    # Exceptions
215    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
216    'UnknownExtra', 'ExtractionError',
217
218    # Warnings
219    'PEP440Warning',
220
221    # Parsing functions and string utilities
222    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
223    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
224    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
225
226    # filesystem utilities
227    'ensure_directory', 'normalize_path',
228
229    # Distribution "precedence" constants
230    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
231
232    # "Provider" interfaces, implementations, and registration/lookup APIs
233    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
234    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
235    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
236    'register_finder', 'register_namespace_handler', 'register_loader_type',
237    'fixup_namespace_packages', 'get_importer',
238
239    # Warnings
240    'PkgResourcesDeprecationWarning',
241
242    # Deprecated/backward compatibility only
243    'run_main', 'AvailableDistributions',
244]
245
246
247class ResolutionError(Exception):
248    """Abstract base for dependency resolution errors"""
249
250    def __repr__(self):
251        return self.__class__.__name__ + repr(self.args)
252
253
254class VersionConflict(ResolutionError):
255    """
256    An already-installed version conflicts with the requested version.
257
258    Should be initialized with the installed Distribution and the requested
259    Requirement.
260    """
261
262    _template = "{self.dist} is installed but {self.req} is required"
263
264    @property
265    def dist(self):
266        return self.args[0]
267
268    @property
269    def req(self):
270        return self.args[1]
271
272    def report(self):
273        return self._template.format(**locals())
274
275    def with_context(self, required_by):
276        """
277        If required_by is non-empty, return a version of self that is a
278        ContextualVersionConflict.
279        """
280        if not required_by:
281            return self
282        args = self.args + (required_by,)
283        return ContextualVersionConflict(*args)
284
285
286class ContextualVersionConflict(VersionConflict):
287    """
288    A VersionConflict that accepts a third parameter, the set of the
289    requirements that required the installed Distribution.
290    """
291
292    _template = VersionConflict._template + ' by {self.required_by}'
293
294    @property
295    def required_by(self):
296        return self.args[2]
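

# Illustrative sketch, not part of the API: VersionConflict carries the
# installed distribution and the unsatisfied requirement; with_context()
# upgrades it to a ContextualVersionConflict when the set of requirers is
# known.  The "sample" project and "consumer" requirer are hypothetical.
# Exposition only, never called.
def _example_version_conflict():
    dist = Distribution(project_name='sample', version='1.0')
    req = Requirement.parse('sample>=2.0')
    err = VersionConflict(dist, req).with_context({'consumer'})
    # report() -> "sample 1.0 is installed but sample>=2.0 is required by {'consumer'}"
    return err.report()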
297
298
299class DistributionNotFound(ResolutionError):
300    """A requested distribution was not found"""
301
302    _template = ("The '{self.req}' distribution was not found "
303                 "and is required by {self.requirers_str}")
304
305    @property
306    def req(self):
307        return self.args[0]
308
309    @property
310    def requirers(self):
311        return self.args[1]
312
313    @property
314    def requirers_str(self):
315        if not self.requirers:
316            return 'the application'
317        return ', '.join(self.requirers)
318
319    def report(self):
320        return self._template.format(**locals())
321
322    def __str__(self):
323        return self.report()
324
325
326class UnknownExtra(ResolutionError):
327    """Distribution doesn't have an "extra feature" of the given name"""
328
329
330_provider_factories = {}
331
332PY_MAJOR = '{}.{}'.format(*sys.version_info)
333EGG_DIST = 3
334BINARY_DIST = 2
335SOURCE_DIST = 1
336CHECKOUT_DIST = 0
337DEVELOP_DIST = -1
338
339
340def register_loader_type(loader_type, provider_factory):
341    """Register `provider_factory` to make providers for `loader_type`
342
343    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
344    and `provider_factory` is a function that, passed a *module* object,
345    returns an ``IResourceProvider`` for that module.
346    """
347    _provider_factories[loader_type] = provider_factory
348
349
350def get_provider(moduleOrReq):
351    """Return an IResourceProvider for the named module or requirement"""
352    if isinstance(moduleOrReq, Requirement):
353        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
354    try:
355        module = sys.modules[moduleOrReq]
356    except KeyError:
357        __import__(moduleOrReq)
358        module = sys.modules[moduleOrReq]
359    loader = getattr(module, '__loader__', None)
360    return _find_adapter(_provider_factories, loader)(module)
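

# Illustrative sketch, not part of the API: a custom PEP 302 loader type can
# be mapped to a provider factory so that get_provider() returns a suitable
# IResourceProvider for modules it loads.  "MyLoader" and "MyProvider" are
# hypothetical placeholders.  Exposition only, never called.
def _example_register_loader_type():
    class MyLoader:
        pass

    class MyProvider(NullProvider):
        pass

    register_loader_type(MyLoader, MyProvider)
    # Afterwards, get_provider(<module name>) consults the module's
    # __loader__ and, for modules loaded by a MyLoader, builds
    # MyProvider(module).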
361
362
363def _macos_vers(_cache=[]):
364    if not _cache:
365        version = platform.mac_ver()[0]
366        # fallback for MacPorts
367        if version == '':
368            plist = '/System/Library/CoreServices/SystemVersion.plist'
369            if os.path.exists(plist):
370                if hasattr(plistlib, 'readPlist'):
371                    plist_content = plistlib.readPlist(plist)
372                    if 'ProductVersion' in plist_content:
373                        version = plist_content['ProductVersion']
374
375        _cache.append(version.split('.'))
376    return _cache[0]
377
378
379def _macos_arch(machine):
380    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
381
382
383def get_build_platform():
384    """Return this platform's string for platform-specific distributions
385
386    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
387    needs some hacks for Linux and macOS.
388    """
389    from sysconfig import get_platform
390
391    plat = get_platform()
392    if sys.platform == "darwin" and not plat.startswith('macosx-'):
393        try:
394            version = _macos_vers()
395            machine = os.uname()[4].replace(" ", "_")
396            return "macosx-%d.%d-%s" % (
397                int(version[0]), int(version[1]),
398                _macos_arch(machine),
399            )
400        except ValueError:
401            # if someone is running a non-Mac darwin system, this will fall
402            # through to the default implementation
403            pass
404    return plat
405
406
407macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
408darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
409# XXX backward compat
410get_platform = get_build_platform
411
412
413def compatible_platforms(provided, required):
414    """Can code for the `provided` platform run on the `required` platform?
415
416    Returns true if either platform is ``None``, or the platforms are equal.
417
418    XXX Needs compatibility checks for Linux and other unixy OSes.
419    """
420    if provided is None or required is None or provided == required:
421        # easy case
422        return True
423
424    # macOS special cases
425    reqMac = macosVersionString.match(required)
426    if reqMac:
427        provMac = macosVersionString.match(provided)
428
429        # is this a Mac package?
430        if not provMac:
431            # this is backwards compatibility for packages built before
432            # setuptools 0.6. All packages built after this point will
433            # use the new macOS designation.
434            provDarwin = darwinVersionString.match(provided)
435            if provDarwin:
436                dversion = int(provDarwin.group(1))
437                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
438                if dversion == 7 and macosversion >= "10.3" or \
439                        dversion == 8 and macosversion >= "10.4":
440                    return True
441            # egg isn't macOS or legacy darwin
442            return False
443
444        # are they the same major version and machine type?
445        if provMac.group(1) != reqMac.group(1) or \
446                provMac.group(3) != reqMac.group(3):
447            return False
448
449        # is the required OS major update >= the provided one?
450        if int(provMac.group(2)) > int(reqMac.group(2)):
451            return False
452
453        return True
454
455    # XXX Linux and other platforms' special cases should go here
456    return False
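

# Illustrative sketch, not part of the API: compatible_platforms() treats a
# missing platform tag as universally compatible and special-cases macOS
# version strings.  Exposition only, never called.
def _example_compatible_platforms():
    assert compatible_platforms(None, get_build_platform())
    assert compatible_platforms('macosx-10.9-x86_64', 'macosx-10.15-x86_64')
    assert not compatible_platforms('macosx-11.0-arm64', 'macosx-10.15-x86_64')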
457
458
459def run_script(dist_spec, script_name):
460    """Locate distribution `dist_spec` and run its `script_name` script"""
461    ns = sys._getframe(1).f_globals
462    name = ns['__name__']
463    ns.clear()
464    ns['__name__'] = name
465    require(dist_spec)[0].run_script(script_name, ns)
466
467
468# backward compatibility
469run_main = run_script
470
471
472def get_distribution(dist):
473    """Return a current distribution object for a Requirement or string"""
474    if isinstance(dist, str):
475        dist = Requirement.parse(dist)
476    if isinstance(dist, Requirement):
477        dist = get_provider(dist)
478    if not isinstance(dist, Distribution):
479        raise TypeError("Expected string, Requirement, or Distribution", dist)
480    return dist
481
482
483def load_entry_point(dist, group, name):
484    """Return `name` entry point of `group` for `dist` or raise ImportError"""
485    return get_distribution(dist).load_entry_point(group, name)
486
487
488def get_entry_map(dist, group=None):
489    """Return the entry point map for `group`, or the full entry map"""
490    return get_distribution(dist).get_entry_map(group)
491
492
493def get_entry_info(dist, group, name):
494    """Return the EntryPoint object for `group`+`name`, or ``None``"""
495    return get_distribution(dist).get_entry_info(group, name)
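

# Illustrative sketch, not part of the API: the entry-point helpers accept a
# project name, requirement string, or Distribution.  The "console_scripts"
# group is standard; the "pip" project below is merely an example of an
# installed distribution, not something this module depends on.  Exposition
# only, never called.
def _example_entry_points():
    dist = get_distribution('pip')                    # Distribution object
    scripts = get_entry_map(dist, 'console_scripts')  # {name: EntryPoint}
    main = load_entry_point(dist, 'console_scripts', 'pip')  # loaded callable
    return dist, scripts, main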
496
497
498class IMetadataProvider:
499    def has_metadata(name):
500        """Does the package's distribution contain the named metadata?"""
501
502    def get_metadata(name):
503        """The named metadata resource as a string"""
504
505    def get_metadata_lines(name):
506        """Yield named metadata resource as list of non-blank non-comment lines
507
508        Leading and trailing whitespace is stripped from each line, and lines
509        with ``#`` as the first non-blank character are omitted."""
510
511    def metadata_isdir(name):
512        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
513
514    def metadata_listdir(name):
515        """List of metadata names in the directory (like ``os.listdir()``)"""
516
517    def run_script(script_name, namespace):
518        """Execute the named script in the supplied namespace dictionary"""
519
520
521class IResourceProvider(IMetadataProvider):
522    """An object that provides access to package resources"""
523
524    def get_resource_filename(manager, resource_name):
525        """Return a true filesystem path for `resource_name`
526
527        `manager` must be an ``IResourceManager``"""
528
529    def get_resource_stream(manager, resource_name):
530        """Return a readable file-like object for `resource_name`
531
532        `manager` must be an ``IResourceManager``"""
533
534    def get_resource_string(manager, resource_name):
535        """Return a string containing the contents of `resource_name`
536
537        `manager` must be an ``IResourceManager``"""
538
539    def has_resource(resource_name):
540        """Does the package contain the named resource?"""
541
542    def resource_isdir(resource_name):
543        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
544
545    def resource_listdir(resource_name):
546        """List of resource names in the directory (like ``os.listdir()``)"""
547
548
549class WorkingSet:
550    """A collection of active distributions on sys.path (or a similar list)"""
551
552    def __init__(self, entries=None):
553        """Create working set from list of path entries (default=sys.path)"""
554        self.entries = []
555        self.entry_keys = {}
556        self.by_key = {}
557        self.callbacks = []
558
559        if entries is None:
560            entries = sys.path
561
562        for entry in entries:
563            self.add_entry(entry)
564
565    @classmethod
566    def _build_master(cls):
567        """
568        Prepare the master working set.
569        """
570        ws = cls()
571        try:
572            from __main__ import __requires__
573        except ImportError:
574            # The main program does not list any requirements
575            return ws
576
577        # ensure the requirements are met
578        try:
579            ws.require(__requires__)
580        except VersionConflict:
581            return cls._build_from_requirements(__requires__)
582
583        return ws
584
585    @classmethod
586    def _build_from_requirements(cls, req_spec):
587        """
588        Build a working set from a requirement spec. Rewrites sys.path.
589        """
590        # try it without defaults already on sys.path
591        # by starting with an empty path
592        ws = cls([])
593        reqs = parse_requirements(req_spec)
594        dists = ws.resolve(reqs, Environment())
595        for dist in dists:
596            ws.add(dist)
597
598        # add any missing entries from sys.path
599        for entry in sys.path:
600            if entry not in ws.entries:
601                ws.add_entry(entry)
602
603        # then copy back to sys.path
604        sys.path[:] = ws.entries
605        return ws
606
607    def add_entry(self, entry):
608        """Add a path item to ``.entries``, finding any distributions on it
609
610        ``find_distributions(entry, True)`` is used to find distributions
611        corresponding to the path entry, and they are added.  `entry` is
612        always appended to ``.entries``, even if it is already present.
613        (This is because ``sys.path`` can contain the same value more than
614        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
615        equal ``sys.path``.)
616        """
617        self.entry_keys.setdefault(entry, [])
618        self.entries.append(entry)
619        for dist in find_distributions(entry, True):
620            self.add(dist, entry, False)
621
622    def __contains__(self, dist):
623        """True if `dist` is the active distribution for its project"""
624        return self.by_key.get(dist.key) == dist
625
626    def find(self, req):
627        """Find a distribution matching requirement `req`
628
629        If there is an active distribution for the requested project, this
630        returns it as long as it meets the version requirement specified by
631        `req`.  But, if there is an active distribution for the project and it
632        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
633        If there is no active distribution for the requested project, ``None``
634        is returned.
635        """
636        dist = self.by_key.get(req.key)
637        if dist is not None and dist not in req:
638            # XXX add more info
639            raise VersionConflict(dist, req)
640        return dist
641
642    def iter_entry_points(self, group, name=None):
643        """Yield entry point objects from `group` matching `name`
644
645        If `name` is None, yields all entry points in `group` from all
646        distributions in the working set, otherwise only ones matching
647        both `group` and `name` are yielded (in distribution order).
648        """
649        return (
650            entry
651            for dist in self
652            for entry in dist.get_entry_map(group).values()
653            if name is None or name == entry.name
654        )
655
656    def run_script(self, requires, script_name):
657        """Locate distribution for `requires` and run `script_name` script"""
658        ns = sys._getframe(1).f_globals
659        name = ns['__name__']
660        ns.clear()
661        ns['__name__'] = name
662        self.require(requires)[0].run_script(script_name, ns)
663
664    def __iter__(self):
665        """Yield distributions for non-duplicate projects in the working set
666
667        The yield order is the order in which the items' path entries were
668        added to the working set.
669        """
670        seen = {}
671        for item in self.entries:
672            if item not in self.entry_keys:
673                # workaround a cache issue
674                continue
675
676            for key in self.entry_keys[item]:
677                if key not in seen:
678                    seen[key] = 1
679                    yield self.by_key[key]
680
681    def add(self, dist, entry=None, insert=True, replace=False):
682        """Add `dist` to working set, associated with `entry`
683
684        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
685        On exit from this routine, `entry` is added to the end of the working
686        set's ``.entries`` (if it wasn't already present).
687
688        `dist` is only added to the working set if it's for a project that
689        doesn't already have a distribution in the set, unless `replace=True`.
690        If it's added, any callbacks registered with the ``subscribe()`` method
691        will be called.
692        """
693        if insert:
694            dist.insert_on(self.entries, entry, replace=replace)
695
696        if entry is None:
697            entry = dist.location
698        keys = self.entry_keys.setdefault(entry, [])
699        keys2 = self.entry_keys.setdefault(dist.location, [])
700        if not replace and dist.key in self.by_key:
701            # ignore hidden distros
702            return
703
704        self.by_key[dist.key] = dist
705        if dist.key not in keys:
706            keys.append(dist.key)
707        if dist.key not in keys2:
708            keys2.append(dist.key)
709        self._added_new(dist)
710
711    # FIXME: 'WorkingSet.resolve' is too complex (11)
712    def resolve(self, requirements, env=None, installer=None,  # noqa: C901
713                replace_conflicting=False, extras=None):
714        """List all distributions needed to (recursively) meet `requirements`
715
716        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
717        if supplied, should be an ``Environment`` instance.  If
718        not supplied, it defaults to all distributions available within any
719        entry or distribution in the working set.  `installer`, if supplied,
720        will be invoked with each requirement that cannot be met by an
721        already-installed distribution; it should return a ``Distribution`` or
722        ``None``.
723
724        Unless `replace_conflicting=True`, raises a VersionConflict exception
725        if any requirements are found on the path that have the correct name
726        but the wrong version.  Otherwise, if an `installer` is supplied it
727        will be invoked to obtain the correct version of the requirement and
728        activate it.
729
730
731        `extras` is a list of the extras to be used with these requirements.
732        This is important because extra requirements may look like `my_req;
733        extra = "my_extra"`, which would otherwise be interpreted as a purely
734        optional requirement.  Instead, we want to be able to assert that these
735        requirements are truly required.
736        """
737
738        # set up the stack
739        requirements = list(requirements)[::-1]
740        # set of processed requirements
741        processed = {}
742        # key -> dist
743        best = {}
744        to_activate = []
745
746        req_extras = _ReqExtras()
747
748        # Mapping of requirement to set of distributions that required it;
749        # useful for reporting info about conflicts.
750        required_by = collections.defaultdict(set)
751
752        while requirements:
753            # process dependencies breadth-first
754            req = requirements.pop(0)
755            if req in processed:
756                # Ignore cyclic or redundant dependencies
757                continue
758
759            if not req_extras.markers_pass(req, extras):
760                continue
761
762            dist = best.get(req.key)
763            if dist is None:
764                # Find the best distribution and add it to the map
765                dist = self.by_key.get(req.key)
766                if dist is None or (dist not in req and replace_conflicting):
767                    ws = self
768                    if env is None:
769                        if dist is None:
770                            env = Environment(self.entries)
771                        else:
772                            # Use an empty environment and workingset to avoid
773                            # any further conflicts with the conflicting
774                            # distribution
775                            env = Environment([])
776                            ws = WorkingSet([])
777                    dist = best[req.key] = env.best_match(
778                        req, ws, installer,
779                        replace_conflicting=replace_conflicting
780                    )
781                    if dist is None:
782                        requirers = required_by.get(req, None)
783                        raise DistributionNotFound(req, requirers)
784                to_activate.append(dist)
785            if dist not in req:
786                # Oops, the "best" so far conflicts with a dependency
787                dependent_req = required_by[req]
788                raise VersionConflict(dist, req).with_context(dependent_req)
789
790            # push the new requirements onto the stack
791            new_requirements = dist.requires(req.extras)[::-1]
792            requirements.extend(new_requirements)
793
794            # Register the new requirements needed by req
795            for new_requirement in new_requirements:
796                required_by[new_requirement].add(req.project_name)
797                req_extras[new_requirement] = req.extras
798
799            processed[req] = True
800
801        # return list of distros to activate
802        return to_activate
803
804    def find_plugins(
805            self, plugin_env, full_env=None, installer=None, fallback=True):
806        """Find all activatable distributions in `plugin_env`
807
808        Example usage::
809
810            distributions, errors = working_set.find_plugins(
811                Environment(plugin_dirlist)
812            )
813            # add plugins+libs to sys.path
814            map(working_set.add, distributions)
815            # display errors
816            print('Could not load', errors)
817
818        The `plugin_env` should be an ``Environment`` instance that contains
819        only distributions that are in the project's "plugin directory" or
820        directories. The `full_env`, if supplied, should be an ``Environment``
821        that contains all currently-available distributions.  If `full_env` is not
822        supplied, one is created automatically from the ``WorkingSet`` this
823        method is called on, which will typically mean that every directory on
824        ``sys.path`` will be scanned for distributions.
825
826        `installer` is a standard installer callback as used by the
827        ``resolve()`` method. The `fallback` flag indicates whether we should
828        attempt to resolve older versions of a plugin if the newest version
829        cannot be resolved.
830
831        This method returns a 2-tuple: (`distributions`, `error_info`), where
832        `distributions` is a list of the distributions found in `plugin_env`
833        that were loadable, along with any other distributions that are needed
834        to resolve their dependencies.  `error_info` is a dictionary mapping
835        unloadable plugin distributions to an exception instance describing the
836        error that occurred. Usually this will be a ``DistributionNotFound`` or
837        ``VersionConflict`` instance.
838        """
839
840        plugin_projects = list(plugin_env)
841        # scan project names in alphabetic order
842        plugin_projects.sort()
843
844        error_info = {}
845        distributions = {}
846
847        if full_env is None:
848            env = Environment(self.entries)
849            env += plugin_env
850        else:
851            env = full_env + plugin_env
852
853        shadow_set = self.__class__([])
854        # put all our entries in shadow_set
855        list(map(shadow_set.add, self))
856
857        for project_name in plugin_projects:
858
859            for dist in plugin_env[project_name]:
860
861                req = [dist.as_requirement()]
862
863                try:
864                    resolvees = shadow_set.resolve(req, env, installer)
865
866                except ResolutionError as v:
867                    # save error info
868                    error_info[dist] = v
869                    if fallback:
870                        # try the next older version of project
871                        continue
872                    else:
873                        # give up on this project, keep going
874                        break
875
876                else:
877                    list(map(shadow_set.add, resolvees))
878                    distributions.update(dict.fromkeys(resolvees))
879
880                    # success, no need to try any more versions of this project
881                    break
882
883        distributions = list(distributions)
884        distributions.sort()
885
886        return distributions, error_info
887
888    def require(self, *requirements):
889        """Ensure that distributions matching `requirements` are activated
890
891        `requirements` must be a string or a (possibly-nested) sequence
892        thereof, specifying the distributions and versions required.  The
893        return value is a sequence of the distributions that needed to be
894        activated to fulfill the requirements; all relevant distributions are
895        included, even if they were already activated in this working set.
896        """
897        needed = self.resolve(parse_requirements(requirements))
898
899        for dist in needed:
900            self.add(dist)
901
902        return needed
903
904    def subscribe(self, callback, existing=True):
905        """Invoke `callback` for all distributions
906
907        If `existing=True` (the default), the callback is also invoked for
908        each distribution already in the working set.
909        """
910        if callback in self.callbacks:
911            return
912        self.callbacks.append(callback)
913        if not existing:
914            return
915        for dist in self:
916            callback(dist)
917
918    def _added_new(self, dist):
919        for callback in self.callbacks:
920            callback(dist)
921
922    def __getstate__(self):
923        return (
924            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
925            self.callbacks[:]
926        )
927
928    def __setstate__(self, e_k_b_c):
929        entries, keys, by_key, callbacks = e_k_b_c
930        self.entries = entries[:]
931        self.entry_keys = keys.copy()
932        self.by_key = by_key.copy()
933        self.callbacks = callbacks[:]
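

# Illustrative sketch, not part of the API: a WorkingSet built from sys.path
# can resolve a requirement string into the distributions that must be
# activated, and can enumerate entry points across those distributions.
# "somepackage>=1.0" and the "myapp.plugins" group are hypothetical.
# Exposition only, never called.
def _example_working_set():
    ws = WorkingSet()                                 # snapshot of sys.path
    needed = ws.resolve(parse_requirements('somepackage>=1.0'))
    for dist in needed:
        ws.add(dist)                                  # activate; fires callbacks
    plugins = list(ws.iter_entry_points('myapp.plugins'))
    return needed, plugins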
934
935
936class _ReqExtras(dict):
937    """
938    Map each requirement to the extras that demanded it.
939    """
940
941    def markers_pass(self, req, extras=None):
942        """
943        Evaluate markers for req against each extra that
944        demanded it.
945
946        Return False if the req has a marker and fails
947        evaluation. Otherwise, return True.
948        """
949        extra_evals = (
950            req.marker.evaluate({'extra': extra})
951            for extra in self.get(req, ()) + (extras or (None,))
952        )
953        return not req.marker or any(extra_evals)
954
955
956class Environment:
957    """Searchable snapshot of distributions on a search path"""
958
959    def __init__(
960            self, search_path=None, platform=get_supported_platform(),
961            python=PY_MAJOR):
962        """Snapshot distributions available on a search path
963
964        Any distributions found on `search_path` are added to the environment.
965        `search_path` should be a sequence of ``sys.path`` items.  If not
966        supplied, ``sys.path`` is used.
967
968        `platform` is an optional string specifying the name of the platform
969        that platform-specific distributions must be compatible with.  If
970        unspecified, it defaults to the current platform.  `python` is an
971        optional string naming the desired version of Python (e.g. ``'3.6'``);
972        it defaults to the current version.
973
974        You may explicitly set `platform` (and/or `python`) to ``None`` if you
975        wish to map *all* distributions, not just those compatible with the
976        running platform or Python version.
977        """
978        self._distmap = {}
979        self.platform = platform
980        self.python = python
981        self.scan(search_path)
982
983    def can_add(self, dist):
984        """Is distribution `dist` acceptable for this environment?
985
986        The distribution must match the platform and python version
987        requirements specified when this environment was created, or False
988        is returned.
989        """
990        py_compat = (
991            self.python is None
992            or dist.py_version is None
993            or dist.py_version == self.python
994        )
995        return py_compat and compatible_platforms(dist.platform, self.platform)
996
997    def remove(self, dist):
998        """Remove `dist` from the environment"""
999        self._distmap[dist.key].remove(dist)
1000
1001    def scan(self, search_path=None):
1002        """Scan `search_path` for distributions usable in this environment
1003
1004        Any distributions found are added to the environment.
1005        `search_path` should be a sequence of ``sys.path`` items.  If not
1006        supplied, ``sys.path`` is used.  Only distributions conforming to
1007        the platform/python version defined at initialization are added.
1008        """
1009        if search_path is None:
1010            search_path = sys.path
1011
1012        for item in search_path:
1013            for dist in find_distributions(item):
1014                self.add(dist)
1015
1016    def __getitem__(self, project_name):
1017        """Return a newest-to-oldest list of distributions for `project_name`
1018
1019        Uses case-insensitive `project_name` comparison, assuming all the
1020        project's distributions use their project's name converted to all
1021        lowercase as their key.
1022
1023        """
1024        distribution_key = project_name.lower()
1025        return self._distmap.get(distribution_key, [])
1026
1027    def add(self, dist):
1028        """Add `dist` if we ``can_add()`` it and it has not already been added
1029        """
1030        if self.can_add(dist) and dist.has_version():
1031            dists = self._distmap.setdefault(dist.key, [])
1032            if dist not in dists:
1033                dists.append(dist)
1034                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
1035
1036    def best_match(
1037            self, req, working_set, installer=None, replace_conflicting=False):
1038        """Find distribution best matching `req` and usable on `working_set`
1039
1040        This calls the ``find(req)`` method of the `working_set` to see if a
1041        suitable distribution is already active.  (This may raise
1042        ``VersionConflict`` if an unsuitable version of the project is already
1043        active in the specified `working_set`.)  If a suitable distribution
1044        isn't active, this method returns the newest distribution in the
1045        environment that meets the ``Requirement`` in `req`.  If no suitable
1046        distribution is found, and `installer` is supplied, then the result of
1047        calling the environment's ``obtain(req, installer)`` method will be
1048        returned.
1049        """
1050        try:
1051            dist = working_set.find(req)
1052        except VersionConflict:
1053            if not replace_conflicting:
1054                raise
1055            dist = None
1056        if dist is not None:
1057            return dist
1058        for dist in self[req.key]:
1059            if dist in req:
1060                return dist
1061        # try to download/install
1062        return self.obtain(req, installer)
1063
1064    def obtain(self, requirement, installer=None):
1065        """Obtain a distribution matching `requirement` (e.g. via download)
1066
1067        Obtain a distro that matches requirement (e.g. via download).  In the
1068        base ``Environment`` class, this routine just returns
1069        ``installer(requirement)``, unless `installer` is None, in which case
1070        None is returned instead.  This method is a hook that allows subclasses
1071        to attempt other ways of obtaining a distribution before falling back
1072        to the `installer` argument."""
1073        if installer is not None:
1074            return installer(requirement)
1075
1076    def __iter__(self):
1077        """Yield the unique project names of the available distributions"""
1078        for key in self._distmap.keys():
1079            if self[key]:
1080                yield key
1081
1082    def __iadd__(self, other):
1083        """In-place addition of a distribution or environment"""
1084        if isinstance(other, Distribution):
1085            self.add(other)
1086        elif isinstance(other, Environment):
1087            for project in other:
1088                for dist in other[project]:
1089                    self.add(dist)
1090        else:
1091            raise TypeError("Can't add %r to environment" % (other,))
1092        return self
1093
1094    def __add__(self, other):
1095        """Add an environment or distribution to an environment"""
1096        new = self.__class__([], platform=None, python=None)
1097        for env in self, other:
1098            new += env
1099        return new
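

# Illustrative sketch, not part of the API: an Environment indexes the
# distributions found on a search path by project name, and best_match()
# picks the newest one that satisfies a requirement.  The "plugins"
# directory and "somepackage" project are hypothetical.  Exposition only,
# never called.
def _example_environment():
    env = Environment(['plugins'])              # scan a single directory
    req = Requirement.parse('somepackage>=1.0')
    candidates = env[req.key]                   # newest-to-oldest list
    best = env.best_match(req, WorkingSet([]))  # may be None if unsatisfied
    return candidates, best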
1100
1101
1102# XXX backward compatibility
1103AvailableDistributions = Environment
1104
1105
1106class ExtractionError(RuntimeError):
1107    """An error occurred extracting a resource
1108
1109    The following attributes are available from instances of this exception:
1110
1111    manager
1112        The resource manager that raised this exception
1113
1114    cache_path
1115        The base directory for resource extraction
1116
1117    original_error
1118        The exception instance that caused extraction to fail
1119    """
1120
1121
1122class ResourceManager:
1123    """Manage resource extraction and packages"""
1124    extraction_path = None
1125
1126    def __init__(self):
1127        self.cached_files = {}
1128
1129    def resource_exists(self, package_or_requirement, resource_name):
1130        """Does the named resource exist?"""
1131        return get_provider(package_or_requirement).has_resource(resource_name)
1132
1133    def resource_isdir(self, package_or_requirement, resource_name):
1134        """Is the named resource an existing directory?"""
1135        return get_provider(package_or_requirement).resource_isdir(
1136            resource_name
1137        )
1138
1139    def resource_filename(self, package_or_requirement, resource_name):
1140        """Return a true filesystem path for specified resource"""
1141        return get_provider(package_or_requirement).get_resource_filename(
1142            self, resource_name
1143        )
1144
1145    def resource_stream(self, package_or_requirement, resource_name):
1146        """Return a readable file-like object for specified resource"""
1147        return get_provider(package_or_requirement).get_resource_stream(
1148            self, resource_name
1149        )
1150
1151    def resource_string(self, package_or_requirement, resource_name):
1152        """Return specified resource as a string"""
1153        return get_provider(package_or_requirement).get_resource_string(
1154            self, resource_name
1155        )
1156
1157    def resource_listdir(self, package_or_requirement, resource_name):
1158        """List the contents of the named resource directory"""
1159        return get_provider(package_or_requirement).resource_listdir(
1160            resource_name
1161        )
1162
1163    def extraction_error(self):
1164        """Give an error message for problems extracting file(s)"""
1165
1166        old_exc = sys.exc_info()[1]
1167        cache_path = self.extraction_path or get_default_cache()
1168
1169        tmpl = textwrap.dedent("""
1170            Can't extract file(s) to egg cache
1171
1172            The following error occurred while trying to extract file(s)
1173            to the Python egg cache:
1174
1175              {old_exc}
1176
1177            The Python egg cache directory is currently set to:
1178
1179              {cache_path}
1180
1181            Perhaps your account does not have write access to this directory?
1182            You can change the cache directory by setting the PYTHON_EGG_CACHE
1183            environment variable to point to an accessible directory.
1184            """).lstrip()
1185        err = ExtractionError(tmpl.format(**locals()))
1186        err.manager = self
1187        err.cache_path = cache_path
1188        err.original_error = old_exc
1189        raise err
1190
1191    def get_cache_path(self, archive_name, names=()):
1192        """Return absolute location in cache for `archive_name` and `names`
1193
1194        The parent directory of the resulting path will be created if it does
1195        not already exist.  `archive_name` should be the base filename of the
1196        enclosing egg (which may not be the name of the enclosing zipfile!),
1197        including its ".egg" extension.  `names`, if provided, should be a
1198        sequence of path name parts "under" the egg's extraction location.
1199
1200        This method should only be called by resource providers that need to
1201        obtain an extraction location, and only for names they intend to
1202        extract, as it tracks the generated names for possible cleanup later.
1203        """
1204        extract_path = self.extraction_path or get_default_cache()
1205        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
1206        try:
1207            _bypass_ensure_directory(target_path)
1208        except Exception:
1209            self.extraction_error()
1210
1211        self._warn_unsafe_extraction_path(extract_path)
1212
1213        self.cached_files[target_path] = 1
1214        return target_path
1215
1216    @staticmethod
1217    def _warn_unsafe_extraction_path(path):
1218        """
1219        If the default extraction path is overridden and set to an insecure
1220        location, such as /tmp, it opens up an opportunity for an attacker to
1221        replace an extracted file with an unauthorized payload. Warn the user
1222        if a known insecure location is used.
1223
1224        See Distribute #375 for more details.
1225        """
1226        if os.name == 'nt' and not path.startswith(os.environ['windir']):
1227            # On Windows, permissions are generally restrictive by default
1228            #  and temp directories are not writable by other users, so
1229            #  bypass the warning.
1230            return
1231        mode = os.stat(path).st_mode
1232        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
1233            msg = (
1234                "Extraction path is writable by group/others "
1235                "and vulnerable to attack when "
1236                "used with get_resource_filename ({path}). "
1237                "Consider a more secure "
1238                "location (set with .set_extraction_path or the "
1239                "PYTHON_EGG_CACHE environment variable)."
1240            ).format(**locals())
1241            warnings.warn(msg, UserWarning)
1242
1243    def postprocess(self, tempname, filename):
1244        """Perform any platform-specific postprocessing of `tempname`
1245
1246        This is where Mac header rewrites should be done; other platforms don't
1247        have anything special they should do.
1248
1249        Resource providers should call this method ONLY after successfully
1250        extracting a compressed resource.  They must NOT call it on resources
1251        that are already in the filesystem.
1252
1253        `tempname` is the current (temporary) name of the file, and `filename`
1254        is the name it will be renamed to by the caller after this routine
1255        returns.
1256        """
1257
1258        if os.name == 'posix':
1259            # Make the resource executable
1260            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
1261            os.chmod(tempname, mode)
1262
1263    def set_extraction_path(self, path):
1264        """Set the base path where resources will be extracted to, if needed.
1265
1266        If you do not call this routine before any extractions take place, the
1267        path defaults to the return value of ``get_default_cache()``.  (Which
1268        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1269        platform-specific fallbacks.  See that routine's documentation for more
1270        details.)
1271
1272        Resources are extracted to subdirectories of this path based upon
1273        information given by the ``IResourceProvider``.  You may set this to a
1274        temporary directory, but then you must call ``cleanup_resources()`` to
1275        delete the extracted files when done.  There is no guarantee that
1276        ``cleanup_resources()`` will be able to remove all extracted files.
1277
1278        (Note: you may not change the extraction path for a given resource
1279        manager once resources have been extracted, unless you first call
1280        ``cleanup_resources()``.)
1281        """
1282        if self.cached_files:
1283            raise ValueError(
1284                "Can't change extraction path, files already extracted"
1285            )
1286
1287        self.extraction_path = path
1288
1289    def cleanup_resources(self, force=False):
1290        """
1291        Delete all extracted resource files and directories, returning a list
1292        of the file and directory names that could not be successfully removed.
1293        This function does not have any concurrency protection, so it should
1294        generally only be called when the extraction path is a temporary
1295        directory exclusive to a single process.  This method is not
1296        automatically called; you must call it explicitly or register it as an
1297        ``atexit`` function if you wish to ensure cleanup of a temporary
1298        directory used for extractions.
1299        """
1300        # XXX
1301
1302
1303def get_default_cache():
1304    """
1305    Return the ``PYTHON_EGG_CACHE`` environment variable
1306    or a platform-relevant user cache dir for an app
1307    named "Python-Eggs".
1308    """
1309    return (
1310        os.environ.get('PYTHON_EGG_CACHE')
1311        or appdirs.user_cache_dir(appname='Python-Eggs')
1312    )
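

# Illustrative sketch, not part of the API: extraction goes to the directory
# named by PYTHON_EGG_CACHE when set, otherwise to a per-user cache directory
# chosen by appdirs.  The "/tmp/my-egg-cache" path is hypothetical.
# Exposition only, never called.
def _example_default_cache():
    os.environ['PYTHON_EGG_CACHE'] = '/tmp/my-egg-cache'
    assert get_default_cache() == '/tmp/my-egg-cache'
    del os.environ['PYTHON_EGG_CACHE']
    return get_default_cache()  # now an appdirs user cache dir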
1313
1314
1315def safe_name(name):
1316    """Convert an arbitrary string to a standard distribution name
1317
1318    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
1319    """
1320    return re.sub('[^A-Za-z0-9.]+', '-', name)
1321
1322
1323def safe_version(version):
1324    """
1325    Convert an arbitrary string to a standard version string
1326    """
1327    try:
1328        # normalize the version
1329        return str(packaging.version.Version(version))
1330    except packaging.version.InvalidVersion:
1331        version = version.replace(' ', '.')
1332        return re.sub('[^A-Za-z0-9.]+', '-', version)
1333
1334
1335def safe_extra(extra):
1336    """Convert an arbitrary string to a standard 'extra' name
1337
1338    Any runs of non-alphanumeric characters are replaced with a single '_',
1339    and the result is always lowercased.
1340    """
1341    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
1342
1343
1344def to_filename(name):
1345    """Convert a project or version name to its filename-escaped form
1346
1347    Any '-' characters are currently replaced with '_'.
1348    """
1349    return name.replace('-', '_')
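

# Illustrative sketch, not part of the API: the normalization helpers above
# compose when building egg and metadata file names.  Exposition only, never
# called.
def _example_name_helpers():
    assert safe_name('My Project!') == 'My-Project-'
    assert safe_version('1.0-beta-2') == '1.0b2'    # normalized per PEP 440
    assert safe_version('2.1 beta') == '2.1.beta'   # fallback for invalid versions
    assert safe_extra('Extra #1') == 'extra_1'
    assert to_filename(safe_name('my-project')) == 'my_project'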
1350
1351
1352def invalid_marker(text):
1353    """
1354    Validate text as a PEP 508 environment marker; return an exception
1355    instance if it is invalid, or False otherwise.
1356    """
1357    try:
1358        evaluate_marker(text)
1359    except SyntaxError as e:
1360        e.filename = None
1361        e.lineno = None
1362        return e
1363    return False
1364
1365
1366def evaluate_marker(text, extra=None):
1367    """
1368    Evaluate a PEP 508 environment marker.
1369    Return a boolean indicating the marker result in this environment.
1370    Raise SyntaxError if marker is invalid.
1371
1372    This implementation uses the 'pyparsing' module.
1373    """
1374    try:
1375        marker = packaging.markers.Marker(text)
1376        return marker.evaluate()
1377    except packaging.markers.InvalidMarker as e:
1378        raise SyntaxError(e) from e
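

# Illustrative sketch, not part of the API: evaluate_marker() answers whether
# a PEP 508 marker applies to the running interpreter, while invalid_marker()
# reports syntax problems without raising.  Exposition only, never called.
def _example_markers():
    assert isinstance(evaluate_marker('python_version >= "3"'), bool)
    assert invalid_marker('python_version >= "3"') is False
    assert isinstance(invalid_marker('this is not a marker'), SyntaxError)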
1379
1380
1381class NullProvider:
1382    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1383
1384    egg_name = None
1385    egg_info = None
1386    loader = None
1387
1388    def __init__(self, module):
1389        self.loader = getattr(module, '__loader__', None)
1390        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1391
1392    def get_resource_filename(self, manager, resource_name):
1393        return self._fn(self.module_path, resource_name)
1394
1395    def get_resource_stream(self, manager, resource_name):
1396        return io.BytesIO(self.get_resource_string(manager, resource_name))
1397
1398    def get_resource_string(self, manager, resource_name):
1399        return self._get(self._fn(self.module_path, resource_name))
1400
1401    def has_resource(self, resource_name):
1402        return self._has(self._fn(self.module_path, resource_name))
1403
1404    def _get_metadata_path(self, name):
1405        return self._fn(self.egg_info, name)
1406
1407    def has_metadata(self, name):
1408        if not self.egg_info:
1409            return self.egg_info
1410
1411        path = self._get_metadata_path(name)
1412        return self._has(path)
1413
1414    def get_metadata(self, name):
1415        if not self.egg_info:
1416            return ""
1417        path = self._get_metadata_path(name)
1418        value = self._get(path)
1419        try:
1420            return value.decode('utf-8')
1421        except UnicodeDecodeError as exc:
1422            # Include the path in the error message to simplify
1423            # troubleshooting, and without changing the exception type.
1424            exc.reason += ' in {} file at path: {}'.format(name, path)
1425            raise
1426
1427    def get_metadata_lines(self, name):
1428        return yield_lines(self.get_metadata(name))
1429
1430    def resource_isdir(self, resource_name):
1431        return self._isdir(self._fn(self.module_path, resource_name))
1432
1433    def metadata_isdir(self, name):
1434        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
1435
1436    def resource_listdir(self, resource_name):
1437        return self._listdir(self._fn(self.module_path, resource_name))
1438
1439    def metadata_listdir(self, name):
1440        if self.egg_info:
1441            return self._listdir(self._fn(self.egg_info, name))
1442        return []
1443
1444    def run_script(self, script_name, namespace):
1445        script = 'scripts/' + script_name
1446        if not self.has_metadata(script):
1447            raise ResolutionError(
1448                "Script {script!r} not found in metadata at {self.egg_info!r}"
1449                .format(**locals()),
1450            )
1451        script_text = self.get_metadata(script).replace('\r\n', '\n')
1452        script_text = script_text.replace('\r', '\n')
1453        script_filename = self._fn(self.egg_info, script)
1454        namespace['__file__'] = script_filename
1455        if os.path.exists(script_filename):
1456            with open(script_filename) as fid:
1457                source = fid.read()
1458            code = compile(source, script_filename, 'exec')
1459            exec(code, namespace, namespace)
1460        else:
1461            from linecache import cache
1462            cache[script_filename] = (
1463                len(script_text), 0, script_text.split('\n'), script_filename
1464            )
1465            script_code = compile(script_text, script_filename, 'exec')
1466            exec(script_code, namespace, namespace)
1467
1468    def _has(self, path):
1469        raise NotImplementedError(
1470            "Can't perform this operation for unregistered loader type"
1471        )
1472
1473    def _isdir(self, path):
1474        raise NotImplementedError(
1475            "Can't perform this operation for unregistered loader type"
1476        )
1477
1478    def _listdir(self, path):
1479        raise NotImplementedError(
1480            "Can't perform this operation for unregistered loader type"
1481        )
1482
1483    def _fn(self, base, resource_name):
1484        self._validate_resource_path(resource_name)
1485        if resource_name:
1486            return os.path.join(base, *resource_name.split('/'))
1487        return base
1488
1489    @staticmethod
1490    def _validate_resource_path(path):
1491        """
1492        Validate the resource paths according to the docs.
1493        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
1494
1495        >>> warned = getfixture('recwarn')
1496        >>> warnings.simplefilter('always')
1497        >>> vrp = NullProvider._validate_resource_path
1498        >>> vrp('foo/bar.txt')
1499        >>> bool(warned)
1500        False
1501        >>> vrp('../foo/bar.txt')
1502        >>> bool(warned)
1503        True
1504        >>> warned.clear()
1505        >>> vrp('/foo/bar.txt')
1506        >>> bool(warned)
1507        True
1508        >>> vrp('foo/../../bar.txt')
1509        >>> bool(warned)
1510        True
1511        >>> warned.clear()
1512        >>> vrp('foo/f../bar.txt')
1513        >>> bool(warned)
1514        False
1515
1516        Windows path separators are straight-up disallowed.
1517        >>> vrp(r'\\foo/bar.txt')
1518        Traceback (most recent call last):
1519        ...
1520        ValueError: Use of .. or absolute path in a resource path \
1521is not allowed.
1522
1523        >>> vrp(r'C:\\foo/bar.txt')
1524        Traceback (most recent call last):
1525        ...
1526        ValueError: Use of .. or absolute path in a resource path \
1527is not allowed.
1528
1529        Blank values are allowed
1530
1531        >>> vrp('')
1532        >>> bool(warned)
1533        False
1534
1535        Non-string values are not.
1536
1537        >>> vrp(None)
1538        Traceback (most recent call last):
1539        ...
1540        AttributeError: ...
1541        """
1542        invalid = (
1543            os.path.pardir in path.split(posixpath.sep) or
1544            posixpath.isabs(path) or
1545            ntpath.isabs(path)
1546        )
1547        if not invalid:
1548            return
1549
1550        msg = "Use of .. or absolute path in a resource path is not allowed."
1551
1552        # Aggressively disallow Windows absolute paths
1553        if ntpath.isabs(path) and not posixpath.isabs(path):
1554            raise ValueError(msg)
1555
1556        # for compatibility, warn; in future
1557        # raise ValueError(msg)
1558        warnings.warn(
1559            msg[:-1] + " and will raise exceptions in a future release.",
1560            DeprecationWarning,
1561            stacklevel=4,
1562        )
1563
1564    def _get(self, path):
1565        if hasattr(self.loader, 'get_data'):
1566            return self.loader.get_data(path)
1567        raise NotImplementedError(
1568            "Can't perform this operation for loaders without 'get_data()'"
1569        )
1570
1571
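# Registering NullProvider for ``object`` makes it the fallback provider for
# any loader type without a more specific registration (the adapter lookup in
# _find_adapter, further below, walks the loader's MRO).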
1572register_loader_type(object, NullProvider)
1573
1574
1575def _parents(path):
1576    """
1577    yield all parents of path including path
1578    """
1579    last = None
1580    while path != last:
1581        yield path
1582        last = path
1583        path, _ = os.path.split(path)
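# A minimal sketch of what _parents yields, assuming a POSIX-style path
# ('/srv/app/demo.egg/pkg' is purely illustrative):
#
#   list(_parents('/srv/app/demo.egg/pkg'))
#   # ['/srv/app/demo.egg/pkg', '/srv/app/demo.egg', '/srv/app', '/srv', '/']
#
# EggProvider._setup_prefix filters this sequence with _is_egg_path to locate
# an enclosing .egg directory, if any.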
1584
1585
1586class EggProvider(NullProvider):
1587    """Provider based on a virtual filesystem"""
1588
1589    def __init__(self, module):
1590        super().__init__(module)
1591        self._setup_prefix()
1592
1593    def _setup_prefix(self):
1594        # Assume that metadata may be nested inside a "basket"
1595        # of multiple eggs and use module_path instead of .archive.
1596        eggs = filter(_is_egg_path, _parents(self.module_path))
1597        egg = next(eggs, None)
1598        egg and self._set_egg(egg)
1599
1600    def _set_egg(self, path):
1601        self.egg_name = os.path.basename(path)
1602        self.egg_info = os.path.join(path, 'EGG-INFO')
1603        self.egg_root = path
1604
1605
1606class DefaultProvider(EggProvider):
1607    """Provides access to package resources in the filesystem"""
1608
1609    def _has(self, path):
1610        return os.path.exists(path)
1611
1612    def _isdir(self, path):
1613        return os.path.isdir(path)
1614
1615    def _listdir(self, path):
1616        return os.listdir(path)
1617
1618    def get_resource_stream(self, manager, resource_name):
1619        return open(self._fn(self.module_path, resource_name), 'rb')
1620
1621    def _get(self, path):
1622        with open(path, 'rb') as stream:
1623            return stream.read()
1624
1625    @classmethod
1626    def _register(cls):
1627        loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
1628        for name in loader_names:
1629            loader_cls = getattr(importlib_machinery, name, type(None))
1630            register_loader_type(loader_cls, cls)
1631
1632
1633DefaultProvider._register()
1634
1635
1636class EmptyProvider(NullProvider):
1637    """Provider that returns nothing for all requests"""
1638
1639    module_path = None
1640
1641    _isdir = _has = lambda self, path: False
1642
1643    def _get(self, path):
1644        return ''
1645
1646    def _listdir(self, path):
1647        return []
1648
1649    def __init__(self):
1650        pass
1651
1652
1653empty_provider = EmptyProvider()
1654
1655
1656class ZipManifests(dict):
1657    """
1658    zip manifest builder
1659    """
1660
1661    @classmethod
1662    def build(cls, path):
1663        """
1664        Build a dictionary similar to the zipimport directory
1665        caches, except instead of tuples, store ZipInfo objects.
1666
1667        Use a platform-specific path separator (os.sep) for the path keys
1668        for compatibility with pypy on Windows.
1669        """
1670        with zipfile.ZipFile(path) as zfile:
1671            items = (
1672                (
1673                    name.replace('/', os.sep),
1674                    zfile.getinfo(name),
1675                )
1676                for name in zfile.namelist()
1677            )
1678            return dict(items)
1679
1680    load = build
1681
1682
1683class MemoizedZipManifests(ZipManifests):
1684    """
1685    Memoized zipfile manifests.
1686    """
1687    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
1688
1689    def load(self, path):
1690        """
1691        Load a manifest at path or return a suitable manifest already loaded.
1692        """
1693        path = os.path.normpath(path)
1694        mtime = os.stat(path).st_mtime
1695
1696        if path not in self or self[path].mtime != mtime:
1697            manifest = self.build(path)
1698            self[path] = self.manifest_mod(manifest, mtime)
1699
1700        return self[path].manifest
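# A brief usage sketch ('/tmp/example.egg' is a hypothetical archive path):
#
#   manifests = MemoizedZipManifests()
#   info = manifests.load('/tmp/example.egg')   # maps os.sep-joined names to ZipInfo
#   info is manifests.load('/tmp/example.egg')  # True until the file's mtime changes
#
# ZipProvider (below) keeps a single shared MemoizedZipManifests instance so
# repeated resource lookups do not re-read the archive.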
1701
1702
1703class ZipProvider(EggProvider):
1704    """Resource support for zips and eggs"""
1705
1706    eagers = None
1707    _zip_manifests = MemoizedZipManifests()
1708
1709    def __init__(self, module):
1710        super().__init__(module)
1711        self.zip_pre = self.loader.archive + os.sep
1712
1713    def _zipinfo_name(self, fspath):
1714        # Convert a virtual filename (full path to file) into a zipfile subpath
1715        # usable with the zipimport directory cache for our target archive
1716        fspath = fspath.rstrip(os.sep)
1717        if fspath == self.loader.archive:
1718            return ''
1719        if fspath.startswith(self.zip_pre):
1720            return fspath[len(self.zip_pre):]
1721        raise AssertionError(
1722            "%s is not a subpath of %s" % (fspath, self.zip_pre)
1723        )
1724
1725    def _parts(self, zip_path):
1726        # Convert a zipfile subpath into an egg-relative path part list.
1727        # pseudo-fs path
1728        fspath = self.zip_pre + zip_path
1729        if fspath.startswith(self.egg_root + os.sep):
1730            return fspath[len(self.egg_root) + 1:].split(os.sep)
1731        raise AssertionError(
1732            "%s is not a subpath of %s" % (fspath, self.egg_root)
1733        )
1734
1735    @property
1736    def zipinfo(self):
1737        return self._zip_manifests.load(self.loader.archive)
1738
1739    def get_resource_filename(self, manager, resource_name):
1740        if not self.egg_name:
1741            raise NotImplementedError(
1742                "resource_filename() only supported for .egg, not .zip"
1743            )
1744        # no need to lock for extraction, since we use temp names
1745        zip_path = self._resource_to_zip(resource_name)
1746        eagers = self._get_eager_resources()
1747        if '/'.join(self._parts(zip_path)) in eagers:
1748            for name in eagers:
1749                self._extract_resource(manager, self._eager_to_zip(name))
1750        return self._extract_resource(manager, zip_path)
1751
1752    @staticmethod
1753    def _get_date_and_size(zip_stat):
1754        size = zip_stat.file_size
1755        # ymdhms+wday, yday, dst
1756        date_time = zip_stat.date_time + (0, 0, -1)
1757        # 1980 offset already done
1758        timestamp = time.mktime(date_time)
1759        return timestamp, size
1760
1761    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
1762    def _extract_resource(self, manager, zip_path):  # noqa: C901
1763
1764        if zip_path in self._index():
1765            for name in self._index()[zip_path]:
1766                last = self._extract_resource(
1767                    manager, os.path.join(zip_path, name)
1768                )
1769            # return the extracted directory name
1770            return os.path.dirname(last)
1771
1772        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1773
1774        if not WRITE_SUPPORT:
1775            raise IOError('"os.rename" and "os.unlink" are not supported '
1776                          'on this platform')
1777        try:
1778
1779            real_path = manager.get_cache_path(
1780                self.egg_name, self._parts(zip_path)
1781            )
1782
1783            if self._is_current(real_path, zip_path):
1784                return real_path
1785
1786            outf, tmpnam = _mkstemp(
1787                ".$extract",
1788                dir=os.path.dirname(real_path),
1789            )
1790            os.write(outf, self.loader.get_data(zip_path))
1791            os.close(outf)
1792            utime(tmpnam, (timestamp, timestamp))
1793            manager.postprocess(tmpnam, real_path)
1794
1795            try:
1796                rename(tmpnam, real_path)
1797
1798            except os.error:
1799                if os.path.isfile(real_path):
1800                    if self._is_current(real_path, zip_path):
1801                        # the file became current since it was checked above,
1802                        #  so proceed.
1803                        return real_path
1804                    # Windows, del old file and retry
1805                    elif os.name == 'nt':
1806                        unlink(real_path)
1807                        rename(tmpnam, real_path)
1808                        return real_path
1809                raise
1810
1811        except os.error:
1812            # report a user-friendly error
1813            manager.extraction_error()
1814
1815        return real_path
1816
1817    def _is_current(self, file_path, zip_path):
1818        """
1819        Return True if the file_path is current for this zip_path
1820        """
1821        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1822        if not os.path.isfile(file_path):
1823            return False
2824        file_stat = os.stat(file_path)
2825        if file_stat.st_size != size or file_stat.st_mtime != timestamp:
1826            return False
1827        # check that the contents match
1828        zip_contents = self.loader.get_data(zip_path)
1829        with open(file_path, 'rb') as f:
1830            file_contents = f.read()
1831        return zip_contents == file_contents
1832
1833    def _get_eager_resources(self):
1834        if self.eagers is None:
1835            eagers = []
1836            for name in ('native_libs.txt', 'eager_resources.txt'):
1837                if self.has_metadata(name):
1838                    eagers.extend(self.get_metadata_lines(name))
1839            self.eagers = eagers
1840        return self.eagers
1841
1842    def _index(self):
1843        try:
1844            return self._dirindex
1845        except AttributeError:
1846            ind = {}
1847            for path in self.zipinfo:
1848                parts = path.split(os.sep)
1849                while parts:
1850                    parent = os.sep.join(parts[:-1])
1851                    if parent in ind:
1852                        ind[parent].append(parts[-1])
1853                        break
1854                    else:
1855                        ind[parent] = [parts.pop()]
1856            self._dirindex = ind
1857            return ind
1858
1859    def _has(self, fspath):
1860        zip_path = self._zipinfo_name(fspath)
1861        return zip_path in self.zipinfo or zip_path in self._index()
1862
1863    def _isdir(self, fspath):
1864        return self._zipinfo_name(fspath) in self._index()
1865
1866    def _listdir(self, fspath):
1867        return list(self._index().get(self._zipinfo_name(fspath), ()))
1868
1869    def _eager_to_zip(self, resource_name):
1870        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
1871
1872    def _resource_to_zip(self, resource_name):
1873        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1874
1875
1876register_loader_type(zipimport.zipimporter, ZipProvider)
1877
1878
1879class FileMetadata(EmptyProvider):
1880    """Metadata handler for standalone PKG-INFO files
1881
1882    Usage::
1883
1884        metadata = FileMetadata("/path/to/PKG-INFO")
1885
1886    This provider rejects all data and metadata requests except for PKG-INFO,
1887    which is treated as existing, and will be the contents of the file at
1888    the provided location.
1889    """
1890
1891    def __init__(self, path):
1892        self.path = path
1893
1894    def _get_metadata_path(self, name):
1895        return self.path
1896
1897    def has_metadata(self, name):
1898        return name == 'PKG-INFO' and os.path.isfile(self.path)
1899
1900    def get_metadata(self, name):
1901        if name != 'PKG-INFO':
1902            raise KeyError("No metadata except PKG-INFO is available")
1903
1904        with io.open(self.path, encoding='utf-8', errors="replace") as f:
1905            metadata = f.read()
1906        self._warn_on_replacement(metadata)
1907        return metadata
1908
1909    def _warn_on_replacement(self, metadata):
1910        replacement_char = '�'
1911        if replacement_char in metadata:
1912            tmpl = "{self.path} could not be properly decoded in UTF-8"
1913            msg = tmpl.format(**locals())
1914            warnings.warn(msg)
1915
1916    def get_metadata_lines(self, name):
1917        return yield_lines(self.get_metadata(name))
1918
1919
1920class PathMetadata(DefaultProvider):
1921    """Metadata provider for egg directories
1922
1923    Usage::
1924
1925        # Development eggs:
1926
1927        egg_info = "/path/to/PackageName.egg-info"
1928        base_dir = os.path.dirname(egg_info)
1929        metadata = PathMetadata(base_dir, egg_info)
1930        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
1931        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
1932
1933        # Unpacked egg directories:
1934
1935        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
1936        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
1937        dist = Distribution.from_filename(egg_path, metadata=metadata)
1938    """
1939
1940    def __init__(self, path, egg_info):
1941        self.module_path = path
1942        self.egg_info = egg_info
1943
1944
1945class EggMetadata(ZipProvider):
1946    """Metadata provider for .egg files"""
1947
1948    def __init__(self, importer):
1949        """Create a metadata provider from a zipimporter"""
1950
1951        self.zip_pre = importer.archive + os.sep
1952        self.loader = importer
1953        if importer.prefix:
1954            self.module_path = os.path.join(importer.archive, importer.prefix)
1955        else:
1956            self.module_path = importer.archive
1957        self._setup_prefix()
1958
1959
1960_declare_state('dict', _distribution_finders={})
1961
1962
1963def register_finder(importer_type, distribution_finder):
1964    """Register `distribution_finder` to find distributions in sys.path items
1965
1966    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
1967    handler), and `distribution_finder` is a callable that, passed a path
1968    item and the importer instance, yields ``Distribution`` instances found on
1969    that path item.  See ``pkg_resources.find_on_path`` for an example."""
1970    _distribution_finders[importer_type] = distribution_finder
1971
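# A minimal sketch of the finder protocol, using made-up names; a finder is
# keyed by importer/path-hook type and yields Distribution objects found on a
# path entry:
#
#   def find_in_custom_store(importer, path_item, only=False):
#       # yield Distribution instances located under path_item
#       return iter(())
#
#   register_finder(MyPathHook, find_in_custom_store)  # MyPathHook is hypothetical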
1972
1973def find_distributions(path_item, only=False):
1974    """Yield distributions accessible via `path_item`"""
1975    importer = get_importer(path_item)
1976    finder = _find_adapter(_distribution_finders, importer)
1977    return finder(importer, path_item, only)
1978
1979
1980def find_eggs_in_zip(importer, path_item, only=False):
1981    """
1982    Find eggs in zip files; possibly multiple nested eggs.
1983    """
1984    if importer.archive.endswith('.whl'):
1985        # wheels are not supported with this finder
1986        # they don't have PKG-INFO metadata, and won't ever contain eggs
1987        return
1988    metadata = EggMetadata(importer)
1989    if metadata.has_metadata('PKG-INFO'):
1990        yield Distribution.from_filename(path_item, metadata=metadata)
1991    if only:
1992        # don't yield nested distros
1993        return
1994    for subitem in metadata.resource_listdir(''):
1995        if _is_egg_path(subitem):
1996            subpath = os.path.join(path_item, subitem)
1997            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
1998            for dist in dists:
1999                yield dist
2000        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
2001            subpath = os.path.join(path_item, subitem)
2002            submeta = EggMetadata(zipimport.zipimporter(subpath))
2003            submeta.egg_info = subpath
2004            yield Distribution.from_location(path_item, subitem, submeta)
2005
2006
2007register_finder(zipimport.zipimporter, find_eggs_in_zip)
2008
2009
2010def find_nothing(importer, path_item, only=False):
2011    return ()
2012
2013
2014register_finder(object, find_nothing)
2015
2016
2017def _by_version_descending(names):
2018    """
2019    Given a list of filenames, return them in descending order
2020    by version number.
2021
2022    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
2023    >>> _by_version_descending(names)
2024    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo']
2025    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
2026    >>> _by_version_descending(names)
2027    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
2028    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
2029    >>> _by_version_descending(names)
2030    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
2031    """
2032    def try_parse(name):
2033        """
2034        Attempt to parse as a version or return a null version.
2035        """
2036        try:
2037            return packaging.version.Version(name)
2038        except Exception:
2039            return packaging.version.Version('0')
2040
2041    def _by_version(name):
2042        """
2043        Parse each component of the filename
2044        """
2045        name, ext = os.path.splitext(name)
2046        parts = itertools.chain(name.split('-'), [ext])
2047        return [try_parse(part) for part in parts]
2048
2049    return sorted(names, key=_by_version, reverse=True)
2050
2051
2052def find_on_path(importer, path_item, only=False):
2053    """Yield distributions accessible on a sys.path directory"""
2054    path_item = _normalize_cached(path_item)
2055
2056    if _is_unpacked_egg(path_item):
2057        yield Distribution.from_filename(
2058            path_item, metadata=PathMetadata(
2059                path_item, os.path.join(path_item, 'EGG-INFO')
2060            )
2061        )
2062        return
2063
2064    entries = (
2065        os.path.join(path_item, child)
2066        for child in safe_listdir(path_item)
2067    )
2068
2069    # for performance, before sorting by version,
2070    # screen entries for only those that will yield
2071    # distributions
2072    filtered = (
2073        entry
2074        for entry in entries
2075        if dist_factory(path_item, entry, only)
2076    )
2077
2078    # scan for .egg and .egg-info in directory
2079    path_item_entries = _by_version_descending(filtered)
2080    for entry in path_item_entries:
2081        fullpath = os.path.join(path_item, entry)
2082        factory = dist_factory(path_item, entry, only)
2083        for dist in factory(fullpath):
2084            yield dist
2085
2086
2087def dist_factory(path_item, entry, only):
2088    """Return a dist_factory for the given entry."""
2089    lower = entry.lower()
2090    is_egg_info = lower.endswith('.egg-info')
2091    is_dist_info = (
2092        lower.endswith('.dist-info') and
2093        os.path.isdir(os.path.join(path_item, entry))
2094    )
2095    is_meta = is_egg_info or is_dist_info
2096    return (
2097        distributions_from_metadata
2098        if is_meta else
2099        find_distributions
2100        if not only and _is_egg_path(entry) else
2101        resolve_egg_link
2102        if not only and lower.endswith('.egg-link') else
2103        NoDists()
2104    )
2105
2106
2107class NoDists:
2108    """
2109    >>> bool(NoDists())
2110    False
2111
2112    >>> list(NoDists()('anything'))
2113    []
2114    """
2115    def __bool__(self):
2116        return False
2117
2118    def __call__(self, fullpath):
2119        return iter(())
2120
2121
2122def safe_listdir(path):
2123    """
2124    Attempt to list contents of path, but suppress some exceptions.
2125    """
2126    try:
2127        return os.listdir(path)
2128    except (PermissionError, NotADirectoryError):
2129        pass
2130    except OSError as e:
2131        # Ignore the directory if it does not exist, is not a directory,
2132        # or permission is denied
2133        if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
2134            raise
2135    return ()
2136
2137
2138def distributions_from_metadata(path):
2139    root = os.path.dirname(path)
2140    if os.path.isdir(path):
2141        if len(os.listdir(path)) == 0:
2142            # empty metadata dir; skip
2143            return
2144        metadata = PathMetadata(root, path)
2145    else:
2146        metadata = FileMetadata(path)
2147    entry = os.path.basename(path)
2148    yield Distribution.from_location(
2149        root, entry, metadata, precedence=DEVELOP_DIST,
2150    )
2151
2152
2153def non_empty_lines(path):
2154    """
2155    Yield non-empty lines from file at path
2156    """
2157    with open(path) as f:
2158        for line in f:
2159            line = line.strip()
2160            if line:
2161                yield line
2162
2163
2164def resolve_egg_link(path):
2165    """
2166    Given a path to an .egg-link, resolve distributions
2167    present in the referenced path.
2168    """
2169    referenced_paths = non_empty_lines(path)
2170    resolved_paths = (
2171        os.path.join(os.path.dirname(path), ref)
2172        for ref in referenced_paths
2173    )
2174    dist_groups = map(find_distributions, resolved_paths)
2175    return next(dist_groups, ())
2176
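# Sketch of .egg-link resolution (paths are illustrative): a development
# install might leave '/site/demo.egg-link' containing the line '../src/demo';
# resolve_egg_link joins that reference against the .egg-link's directory and
# returns whatever find_distributions yields for '/site/../src/demo' (an empty
# tuple if the file has no non-empty lines).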
2177
2178register_finder(pkgutil.ImpImporter, find_on_path)
2179
2180if hasattr(importlib_machinery, 'FileFinder'):
2181    register_finder(importlib_machinery.FileFinder, find_on_path)
2182
2183_declare_state('dict', _namespace_handlers={})
2184_declare_state('dict', _namespace_packages={})
2185
2186
2187def register_namespace_handler(importer_type, namespace_handler):
2188    """Register `namespace_handler` to declare namespace packages
2189
2190    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
2191    handler), and `namespace_handler` is a callable like this::
2192
2193        def namespace_handler(importer, path_entry, moduleName, module):
2194            # return a path_entry to use for child packages
2195
2196    Namespace handlers are only called if the importer object has already
2197    agreed that it can handle the relevant path item, and they should only
2198    return a subpath if the module __path__ does not already contain an
2199    equivalent subpath.  For an example namespace handler, see
2200    ``pkg_resources.file_ns_handler``.
2201    """
2202    _namespace_handlers[importer_type] = namespace_handler
2203
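# The stock handlers registered further below (file_ns_handler for filesystem
# and zipfile importers, null_ns_handler as the fallback) cover the common
# cases; a custom path hook would be wired up the same way (MyPathHook and
# my_ns_handler are hypothetical):
#
#   register_namespace_handler(MyPathHook, my_ns_handler)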
2204
2205def _handle_ns(packageName, path_item):
2206    """Ensure that named package includes a subpath of path_item (if needed)"""
2207
2208    importer = get_importer(path_item)
2209    if importer is None:
2210        return None
2211
2212    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
2213    try:
2214        spec = importer.find_spec(packageName)
2215    except AttributeError:
2216        # capture warnings due to #1111
2217        with warnings.catch_warnings():
2218            warnings.simplefilter("ignore")
2219            loader = importer.find_module(packageName)
2220    else:
2221        loader = spec.loader if spec else None
2222
2223    if loader is None:
2224        return None
2225    module = sys.modules.get(packageName)
2226    if module is None:
2227        module = sys.modules[packageName] = types.ModuleType(packageName)
2228        module.__path__ = []
2229        _set_parent_ns(packageName)
2230    elif not hasattr(module, '__path__'):
2231        raise TypeError("Not a package:", packageName)
2232    handler = _find_adapter(_namespace_handlers, importer)
2233    subpath = handler(importer, path_item, packageName, module)
2234    if subpath is not None:
2235        path = module.__path__
2236        path.append(subpath)
2237        importlib.import_module(packageName)
2238        _rebuild_mod_path(path, packageName, module)
2239    return subpath
2240
2241
2242def _rebuild_mod_path(orig_path, package_name, module):
2243    """
2244    Rebuild module.__path__ ensuring that all entries are ordered
2245    corresponding to their sys.path order
2246    """
2247    sys_path = [_normalize_cached(p) for p in sys.path]
2248
2249    def safe_sys_path_index(entry):
2250        """
2251        Workaround for #520 and #513.
2252        """
2253        try:
2254            return sys_path.index(entry)
2255        except ValueError:
2256            return float('inf')
2257
2258    def position_in_sys_path(path):
2259        """
2260        Return the ordinal of the path based on its position in sys.path
2261        """
2262        path_parts = path.split(os.sep)
2263        module_parts = package_name.count('.') + 1
2264        parts = path_parts[:-module_parts]
2265        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
2266
2267    new_path = sorted(orig_path, key=position_in_sys_path)
2268    new_path = [_normalize_cached(p) for p in new_path]
2269
2270    if isinstance(module.__path__, list):
2271        module.__path__[:] = new_path
2272    else:
2273        module.__path__ = new_path
2274
2275
2276def declare_namespace(packageName):
2277    """Declare that package 'packageName' is a namespace package"""
2278
2279    _imp.acquire_lock()
2280    try:
2281        if packageName in _namespace_packages:
2282            return
2283
2284        path = sys.path
2285        parent, _, _ = packageName.rpartition('.')
2286
2287        if parent:
2288            declare_namespace(parent)
2289            if parent not in _namespace_packages:
2290                __import__(parent)
2291            try:
2292                path = sys.modules[parent].__path__
2293            except AttributeError as e:
2294                raise TypeError("Not a package:", parent) from e
2295
2296        # Track what packages are namespaces, so when new path items are added,
2297        # they can be updated
2298        _namespace_packages.setdefault(parent or None, []).append(packageName)
2299        _namespace_packages.setdefault(packageName, [])
2300
2301        for path_item in path:
2302            # Ensure all the parent's path items are reflected in the child,
2303            # if they apply
2304            _handle_ns(packageName, path_item)
2305
2306    finally:
2307        _imp.release_lock()
2308
2309
2310def fixup_namespace_packages(path_item, parent=None):
2311    """Ensure that previously-declared namespace packages include path_item"""
2312    _imp.acquire_lock()
2313    try:
2314        for package in _namespace_packages.get(parent, ()):
2315            subpath = _handle_ns(package, path_item)
2316            if subpath:
2317                fixup_namespace_packages(subpath, package)
2318    finally:
2319        _imp.release_lock()
2320
2321
2322def file_ns_handler(importer, path_item, packageName, module):
2323    """Compute an ns-package subpath for a filesystem or zipfile importer"""
2324
2325    subpath = os.path.join(path_item, packageName.split('.')[-1])
2326    normalized = _normalize_cached(subpath)
2327    for item in module.__path__:
2328        if _normalize_cached(item) == normalized:
2329            break
2330    else:
2331        # Only return the path if it's not already there
2332        return subpath
2333
2334
2335register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
2336register_namespace_handler(zipimport.zipimporter, file_ns_handler)
2337
2338if hasattr(importlib_machinery, 'FileFinder'):
2339    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2340
2341
2342def null_ns_handler(importer, path_item, packageName, module):
2343    return None
2344
2345
2346register_namespace_handler(object, null_ns_handler)
2347
2348
2349def normalize_path(filename):
2350    """Normalize a file/dir name for comparison purposes"""
2351    return os.path.normcase(os.path.realpath(os.path.normpath(
2352        _cygwin_patch(filename))))
2353
2354
2355def _cygwin_patch(filename):  # pragma: nocover
2356    """
2357    Contrary to POSIX 2008, on Cygwin, getcwd(3) contains
2358    symlink components.  Using os.path.abspath() works around
2359    this limitation.  A fix in os.getcwd() would probably be
2360    better, in Cygwin even more so, except that this seems
2361    to be by design...
2362    """
2363    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
2364
2365
2366def _normalize_cached(filename, _cache={}):
2367    try:
2368        return _cache[filename]
2369    except KeyError:
2370        _cache[filename] = result = normalize_path(filename)
2371        return result
2372
2373
2374def _is_egg_path(path):
2375    """
2376    Determine if given path appears to be an egg.
2377    """
2378    return _is_zip_egg(path) or _is_unpacked_egg(path)
2379
2380
2381def _is_zip_egg(path):
2382    return (
2383        path.lower().endswith('.egg') and
2384        os.path.isfile(path) and
2385        zipfile.is_zipfile(path)
2386    )
2387
2388
2389def _is_unpacked_egg(path):
2390    """
2391    Determine if given path appears to be an unpacked egg.
2392    """
2393    return (
2394        path.lower().endswith('.egg') and
2395        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
2396    )
2397
2398
2399def _set_parent_ns(packageName):
2400    parts = packageName.split('.')
2401    name = parts.pop()
2402    if parts:
2403        parent = '.'.join(parts)
2404        setattr(sys.modules[parent], name, sys.modules[packageName])
2405
2406
2407MODULE = re.compile(r"\w+(\.\w+)*$").match
2408EGG_NAME = re.compile(
2409    r"""
2410    (?P<name>[^-]+) (
2411        -(?P<ver>[^-]+) (
2412            -py(?P<pyver>[^-]+) (
2413                -(?P<plat>.+)
2414            )?
2415        )?
2416    )?
2417    """,
2418    re.VERBOSE | re.IGNORECASE,
2419).match
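# Illustrative EGG_NAME parse (the filename is made up):
#
#   m = EGG_NAME('demo_pkg-1.2-py3.8-linux_x86_64')
#   m.group('name', 'ver', 'pyver', 'plat')
#   # ('demo_pkg', '1.2', '3.8', 'linux_x86_64')
#
# Everything after the project name is optional, so bare names also match.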
2420
2421
2422class EntryPoint:
2423    """Object representing an advertised importable object"""
2424
2425    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
2426        if not MODULE(module_name):
2427            raise ValueError("Invalid module name", module_name)
2428        self.name = name
2429        self.module_name = module_name
2430        self.attrs = tuple(attrs)
2431        self.extras = tuple(extras)
2432        self.dist = dist
2433
2434    def __str__(self):
2435        s = "%s = %s" % (self.name, self.module_name)
2436        if self.attrs:
2437            s += ':' + '.'.join(self.attrs)
2438        if self.extras:
2439            s += ' [%s]' % ','.join(self.extras)
2440        return s
2441
2442    def __repr__(self):
2443        return "EntryPoint.parse(%r)" % str(self)
2444
2445    def load(self, require=True, *args, **kwargs):
2446        """
2447        Require packages for this EntryPoint, then resolve it.
2448        """
2449        if not require or args or kwargs:
2450            warnings.warn(
2451                "Parameters to load are deprecated.  Call .resolve and "
2452                ".require separately.",
2453                PkgResourcesDeprecationWarning,
2454                stacklevel=2,
2455            )
2456        if require:
2457            self.require(*args, **kwargs)
2458        return self.resolve()
2459
2460    def resolve(self):
2461        """
2462        Resolve the entry point from its module and attrs.
2463        """
2464        module = __import__(self.module_name, fromlist=['__name__'], level=0)
2465        try:
2466            return functools.reduce(getattr, self.attrs, module)
2467        except AttributeError as exc:
2468            raise ImportError(str(exc)) from exc
2469
2470    def require(self, env=None, installer=None):
2471        if self.extras and not self.dist:
2472            raise UnknownExtra("Can't require() without a distribution", self)
2473
2474        # Get the requirements for this entry point with all its extras and
2475        # then resolve them. We have to pass `extras` along when resolving so
2476        # that the working set knows what extras we want. Otherwise, for
2477        # dist-info distributions, the working set will assume that the
2478        # requirements for that extra are purely optional and skip over them.
2479        reqs = self.dist.requires(self.extras)
2480        items = working_set.resolve(reqs, env, installer, extras=self.extras)
2481        list(map(working_set.add, items))
2482
2483    pattern = re.compile(
2484        r'\s*'
2485        r'(?P<name>.+?)\s*'
2486        r'=\s*'
2487        r'(?P<module>[\w.]+)\s*'
2488        r'(:\s*(?P<attr>[\w.]+))?\s*'
2489        r'(?P<extras>\[.*\])?\s*$'
2490    )
2491
2492    @classmethod
2493    def parse(cls, src, dist=None):
2494        """Parse a single entry point from string `src`
2495
2496        Entry point syntax follows the form::
2497
2498            name = some.module:some.attr [extra1, extra2]
2499
2500        The entry name and module name are required, but the ``:attrs`` and
2501        ``[extras]`` parts are optional
2502        """
2503        m = cls.pattern.match(src)
2504        if not m:
2505            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
2506            raise ValueError(msg, src)
2507        res = m.groupdict()
2508        extras = cls._parse_extras(res['extras'])
2509        attrs = res['attr'].split('.') if res['attr'] else ()
2510        return cls(res['name'], res['module'], attrs, extras, dist)
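    # A small sketch of the accepted syntax (the names are illustrative):
    #
    #   ep = EntryPoint.parse('my-cmd = my_pkg.cli:main [extra1]')
    #   ep.name, ep.module_name, ep.attrs, ep.extras
    #   # ('my-cmd', 'my_pkg.cli', ('main',), ('extra1',))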
2511
2512    @classmethod
2513    def _parse_extras(cls, extras_spec):
2514        if not extras_spec:
2515            return ()
2516        req = Requirement.parse('x' + extras_spec)
2517        if req.specs:
2518            raise ValueError("Invalid extras specification", extras_spec)
2519        return req.extras
2520
2521    @classmethod
2522    def parse_group(cls, group, lines, dist=None):
2523        """Parse an entry point group"""
2524        if not MODULE(group):
2525            raise ValueError("Invalid group name", group)
2526        this = {}
2527        for line in yield_lines(lines):
2528            ep = cls.parse(line, dist)
2529            if ep.name in this:
2530                raise ValueError("Duplicate entry point", group, ep.name)
2531            this[ep.name] = ep
2532        return this
2533
2534    @classmethod
2535    def parse_map(cls, data, dist=None):
2536        """Parse a map of entry point groups"""
2537        if isinstance(data, dict):
2538            data = data.items()
2539        else:
2540            data = split_sections(data)
2541        maps = {}
2542        for group, lines in data:
2543            if group is None:
2544                if not lines:
2545                    continue
2546                raise ValueError("Entry points must be listed in groups")
2547            group = group.strip()
2548            if group in maps:
2549                raise ValueError("Duplicate group name", group)
2550            maps[group] = cls.parse_group(group, lines, dist)
2551        return maps
2552
2553
2554def _version_from_file(lines):
2555    """
2556    Given an iterable of lines from a Metadata file, return
2557    the value of the Version field, if present, or None otherwise.
2558    """
2559    def is_version_line(line):
2560        return line.lower().startswith('version:')
2561    version_lines = filter(is_version_line, lines)
2562    line = next(iter(version_lines), '')
2563    _, _, value = line.partition(':')
2564    return safe_version(value.strip()) or None
2565
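# For example (the metadata lines are illustrative):
#
#   _version_from_file(['Name: demo', 'Version: 1.0'])
#   # '1.0'
#
# and None when no 'Version:' header is present.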
2566
2567class Distribution:
2568    """Wrap an actual or potential sys.path entry w/metadata"""
2569    PKG_INFO = 'PKG-INFO'
2570
2571    def __init__(
2572            self, location=None, metadata=None, project_name=None,
2573            version=None, py_version=PY_MAJOR, platform=None,
2574            precedence=EGG_DIST):
2575        self.project_name = safe_name(project_name or 'Unknown')
2576        if version is not None:
2577            self._version = safe_version(version)
2578        self.py_version = py_version
2579        self.platform = platform
2580        self.location = location
2581        self.precedence = precedence
2582        self._provider = metadata or empty_provider
2583
2584    @classmethod
2585    def from_location(cls, location, basename, metadata=None, **kw):
2586        project_name, version, py_version, platform = [None] * 4
2587        basename, ext = os.path.splitext(basename)
2588        if ext.lower() in _distributionImpl:
2589            cls = _distributionImpl[ext.lower()]
2590
2591            match = EGG_NAME(basename)
2592            if match:
2593                project_name, version, py_version, platform = match.group(
2594                    'name', 'ver', 'pyver', 'plat'
2595                )
2596        return cls(
2597            location, metadata, project_name=project_name, version=version,
2598            py_version=py_version, platform=platform, **kw
2599        )._reload_version()
2600
2601    def _reload_version(self):
2602        return self
2603
2604    @property
2605    def hashcmp(self):
2606        return (
2607            self.parsed_version,
2608            self.precedence,
2609            self.key,
2610            self.location,
2611            self.py_version or '',
2612            self.platform or '',
2613        )
2614
2615    def __hash__(self):
2616        return hash(self.hashcmp)
2617
2618    def __lt__(self, other):
2619        return self.hashcmp < other.hashcmp
2620
2621    def __le__(self, other):
2622        return self.hashcmp <= other.hashcmp
2623
2624    def __gt__(self, other):
2625        return self.hashcmp > other.hashcmp
2626
2627    def __ge__(self, other):
2628        return self.hashcmp >= other.hashcmp
2629
2630    def __eq__(self, other):
2631        if not isinstance(other, self.__class__):
2632            # It's not a Distribution, so they are not equal
2633            return False
2634        return self.hashcmp == other.hashcmp
2635
2636    def __ne__(self, other):
2637        return not self == other
2638
2639    # These properties have to be lazy so that we don't have to load any
2640    # metadata until/unless it's actually needed.  (i.e., some distributions
2641    # may not know their name or version without loading PKG-INFO)
2642
2643    @property
2644    def key(self):
2645        try:
2646            return self._key
2647        except AttributeError:
2648            self._key = key = self.project_name.lower()
2649            return key
2650
2651    @property
2652    def parsed_version(self):
2653        if not hasattr(self, "_parsed_version"):
2654            self._parsed_version = parse_version(self.version)
2655
2656        return self._parsed_version
2657
2658    def _warn_legacy_version(self):
2659        LV = packaging.version.LegacyVersion
2660        is_legacy = isinstance(self._parsed_version, LV)
2661        if not is_legacy:
2662            return
2663
2664        # While an empty version is technically a legacy version and
2665        # is not a valid PEP 440 version, it is also unlikely to
2666        # come from a real project; more likely it comes from
2667        # setuptools attempting to parse a filename and including
2668        # it in the list. So we only emit this warning when the
2669        # version is non-empty.
2670        if not self.version:
2671            return
2672
2673        tmpl = textwrap.dedent("""
2674            '{project_name} ({version})' is being parsed as a legacy,
2675            non PEP 440,
2676            version. You may find odd behavior and sort order.
2677            In particular it will be sorted as less than 0.0. It
2678            is recommended to migrate to PEP 440 compatible
2679            versions.
2680            """).strip().replace('\n', ' ')
2681
2682        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
2683
2684    @property
2685    def version(self):
2686        try:
2687            return self._version
2688        except AttributeError as e:
2689            version = self._get_version()
2690            if version is None:
2691                path = self._get_metadata_path_for_display(self.PKG_INFO)
2692                msg = (
2693                    "Missing 'Version:' header and/or {} file at path: {}"
2694                ).format(self.PKG_INFO, path)
2695                raise ValueError(msg, self) from e
2696
2697            return version
2698
2699    @property
2700    def _dep_map(self):
2701        """
2702        A map of extra to its list of (direct) requirements
2703        for this distribution, including the null extra.
2704        """
2705        try:
2706            return self.__dep_map
2707        except AttributeError:
2708            self.__dep_map = self._filter_extras(self._build_dep_map())
2709        return self.__dep_map
2710
2711    @staticmethod
2712    def _filter_extras(dm):
2713        """
2714        Given a mapping of extras to dependencies, strip off
2715        environment markers and filter out any dependencies
2716        not matching the markers.
2717        """
2718        for extra in list(filter(None, dm)):
2719            new_extra = extra
2720            reqs = dm.pop(extra)
2721            new_extra, _, marker = extra.partition(':')
2722            fails_marker = marker and (
2723                invalid_marker(marker)
2724                or not evaluate_marker(marker)
2725            )
2726            if fails_marker:
2727                reqs = []
2728            new_extra = safe_extra(new_extra) or None
2729
2730            dm.setdefault(new_extra, []).extend(reqs)
2731        return dm
2732
2733    def _build_dep_map(self):
2734        dm = {}
2735        for name in 'requires.txt', 'depends.txt':
2736            for extra, reqs in split_sections(self._get_metadata(name)):
2737                dm.setdefault(extra, []).extend(parse_requirements(reqs))
2738        return dm
2739
2740    def requires(self, extras=()):
2741        """List of Requirements needed for this distro if `extras` are used"""
2742        dm = self._dep_map
2743        deps = []
2744        deps.extend(dm.get(None, ()))
2745        for ext in extras:
2746            try:
2747                deps.extend(dm[safe_extra(ext)])
2748            except KeyError as e:
2749                raise UnknownExtra(
2750                    "%s has no such extra feature %r" % (self, ext)
2751                ) from e
2752        return deps
2753
2754    def _get_metadata_path_for_display(self, name):
2755        """
2756        Return the path to the given metadata file, if available.
2757        """
2758        try:
2759            # We need to access _get_metadata_path() on the provider object
2760            # directly rather than through this class's __getattr__()
2761            # since _get_metadata_path() is marked private.
2762            path = self._provider._get_metadata_path(name)
2763
2764        # Handle exceptions e.g. in case the distribution's metadata
2765        # provider doesn't support _get_metadata_path().
2766        except Exception:
2767            return '[could not detect]'
2768
2769        return path
2770
2771    def _get_metadata(self, name):
2772        if self.has_metadata(name):
2773            for line in self.get_metadata_lines(name):
2774                yield line
2775
2776    def _get_version(self):
2777        lines = self._get_metadata(self.PKG_INFO)
2778        version = _version_from_file(lines)
2779
2780        return version
2781
2782    def activate(self, path=None, replace=False):
2783        """Ensure distribution is importable on `path` (default=sys.path)"""
2784        if path is None:
2785            path = sys.path
2786        self.insert_on(path, replace=replace)
2787        if path is sys.path:
2788            fixup_namespace_packages(self.location)
2789            for pkg in self._get_metadata('namespace_packages.txt'):
2790                if pkg in sys.modules:
2791                    declare_namespace(pkg)
2792
2793    def egg_name(self):
2794        """Return what this distribution's standard .egg filename should be"""
2795        filename = "%s-%s-py%s" % (
2796            to_filename(self.project_name), to_filename(self.version),
2797            self.py_version or PY_MAJOR
2798        )
2799
2800        if self.platform:
2801            filename += '-' + self.platform
2802        return filename
2803
2804    def __repr__(self):
2805        if self.location:
2806            return "%s (%s)" % (self, self.location)
2807        else:
2808            return str(self)
2809
2810    def __str__(self):
2811        try:
2812            version = getattr(self, 'version', None)
2813        except ValueError:
2814            version = None
2815        version = version or "[unknown version]"
2816        return "%s %s" % (self.project_name, version)
2817
2818    def __getattr__(self, attr):
2819        """Delegate all unrecognized public attributes to .metadata provider"""
2820        if attr.startswith('_'):
2821            raise AttributeError(attr)
2822        return getattr(self._provider, attr)
2823
2824    def __dir__(self):
2825        return list(
2826            set(super().__dir__())
2827            | set(
2828                attr for attr in self._provider.__dir__()
2829                if not attr.startswith('_')
2830            )
2831        )
2832
2833    @classmethod
2834    def from_filename(cls, filename, metadata=None, **kw):
2835        return cls.from_location(
2836            _normalize_cached(filename), os.path.basename(filename), metadata,
2837            **kw
2838        )
2839
2840    def as_requirement(self):
2841        """Return a ``Requirement`` that matches this distribution exactly"""
2842        if isinstance(self.parsed_version, packaging.version.Version):
2843            spec = "%s==%s" % (self.project_name, self.parsed_version)
2844        else:
2845            spec = "%s===%s" % (self.project_name, self.parsed_version)
2846
2847        return Requirement.parse(spec)
2848
2849    def load_entry_point(self, group, name):
2850        """Return the `name` entry point of `group` or raise ImportError"""
2851        ep = self.get_entry_info(group, name)
2852        if ep is None:
2853            raise ImportError("Entry point %r not found" % ((group, name),))
2854        return ep.load()
2855
2856    def get_entry_map(self, group=None):
2857        """Return the entry point map for `group`, or the full entry map"""
2858        try:
2859            ep_map = self._ep_map
2860        except AttributeError:
2861            ep_map = self._ep_map = EntryPoint.parse_map(
2862                self._get_metadata('entry_points.txt'), self
2863            )
2864        if group is not None:
2865            return ep_map.get(group, {})
2866        return ep_map
2867
2868    def get_entry_info(self, group, name):
2869        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2870        return self.get_entry_map(group).get(name)
2871
2872    # FIXME: 'Distribution.insert_on' is too complex (13)
2873    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
2874        """Ensure self.location is on path
2875
2876        If replace=False (default):
2877            - If location is already in path anywhere, do nothing.
2878            - Else:
2879              - If it's an egg and its parent directory is on path,
2880                insert just ahead of the parent.
2881              - Else: add to the end of path.
2882        If replace=True:
2883            - If location is already on path anywhere (not eggs)
2884              or higher priority than its parent (eggs),
2885              do nothing.
2886            - Else:
2887              - If it's an egg and its parent directory is on path,
2888                insert just ahead of the parent,
2889                removing any lower-priority entries.
2890              - Else: add it to the front of path.
2891        """
2892
2893        loc = loc or self.location
2894        if not loc:
2895            return
2896
2897        nloc = _normalize_cached(loc)
2898        bdir = os.path.dirname(nloc)
2899        npath = [(p and _normalize_cached(p) or p) for p in path]
2900
2901        for p, item in enumerate(npath):
2902            if item == nloc:
2903                if replace:
2904                    break
2905                else:
2906                    # don't modify path (even removing duplicates) if
2907                    # found and not replace
2908                    return
2909            elif item == bdir and self.precedence == EGG_DIST:
2910                # if it's an .egg, give it precedence over its directory
2911                # UNLESS it's already been added to sys.path and replace=False
2912                if (not replace) and nloc in npath[p:]:
2913                    return
2914                if path is sys.path:
2915                    self.check_version_conflict()
2916                path.insert(p, loc)
2917                npath.insert(p, nloc)
2918                break
2919        else:
2920            if path is sys.path:
2921                self.check_version_conflict()
2922            if replace:
2923                path.insert(0, loc)
2924            else:
2925                path.append(loc)
2926            return
2927
2928        # p is the spot where we found or inserted loc; now remove duplicates
2929        while True:
2930            try:
2931                np = npath.index(nloc, p + 1)
2932            except ValueError:
2933                break
2934            else:
2935                del npath[np], path[np]
2936                # continue scanning for duplicates after the removed slot
2937                p = np
2938
2939        return
2940
2941    def check_version_conflict(self):
2942        if self.key == 'setuptools':
2943            # ignore the inevitable setuptools self-conflicts  :(
2944            return
2945
2946        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2947        loc = normalize_path(self.location)
2948        for modname in self._get_metadata('top_level.txt'):
2949            if (modname not in sys.modules or modname in nsp
2950                    or modname in _namespace_packages):
2951                continue
2952            if modname in ('pkg_resources', 'setuptools', 'site'):
2953                continue
2954            fn = getattr(sys.modules[modname], '__file__', None)
2955            if fn and (normalize_path(fn).startswith(loc) or
2956                       fn.startswith(self.location)):
2957                continue
2958            issue_warning(
2959                "Module %s was already imported from %s, but %s is being added"
2960                " to sys.path" % (modname, fn, self.location),
2961            )
2962
2963    def has_version(self):
2964        try:
2965            self.version
2966        except ValueError:
2967            issue_warning("Unbuilt egg for " + repr(self))
2968            return False
2969        return True
2970
2971    def clone(self, **kw):
2972        """Copy this distribution, substituting in any changed keyword args"""
2973        names = 'project_name version py_version platform location precedence'
2974        for attr in names.split():
2975            kw.setdefault(attr, getattr(self, attr, None))
2976        kw.setdefault('metadata', self._provider)
2977        return self.__class__(**kw)
2978
2979    @property
2980    def extras(self):
2981        return [dep for dep in self._dep_map if dep]
2982
2983
2984class EggInfoDistribution(Distribution):
2985    def _reload_version(self):
2986        """
2987        Packages installed by distutils (e.g. numpy or
2988        scipy) use an old form of safe_version, and so
2989        their version numbers can get mangled when
2990        converted to filenames (e.g., 1.11.0.dev0+2329eae
2991        becomes 1.11.0.dev0_2329eae).  Such distributions
2992        will not be parsed properly downstream by
2993        Distribution and safe_version, so take the extra
2994        step of trying to get the version number from the
2995        metadata file itself instead of the filename.
2996        """
2997        md_version = self._get_version()
2998        if md_version:
2999            self._version = md_version
3000        return self
3001
3002
3003class DistInfoDistribution(Distribution):
3004    """
3005    Wrap an actual or potential sys.path entry
3006    w/metadata, .dist-info style.
3007    """
3008    PKG_INFO = 'METADATA'
3009    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
3010
3011    @property
3012    def _parsed_pkg_info(self):
3013        """Parse and cache metadata"""
3014        try:
3015            return self._pkg_info
3016        except AttributeError:
3017            metadata = self.get_metadata(self.PKG_INFO)
3018            self._pkg_info = email.parser.Parser().parsestr(metadata)
3019            return self._pkg_info
3020
3021    @property
3022    def _dep_map(self):
3023        try:
3024            return self.__dep_map
3025        except AttributeError:
3026            self.__dep_map = self._compute_dependencies()
3027            return self.__dep_map
3028
3029    def _compute_dependencies(self):
3030        """Recompute this distribution's dependencies."""
3031        dm = self.__dep_map = {None: []}
3032
3033        reqs = []
3034        # Including any condition expressions
3035        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
3036            reqs.extend(parse_requirements(req))
3037
3038        def reqs_for_extra(extra):
3039            for req in reqs:
3040                if not req.marker or req.marker.evaluate({'extra': extra}):
3041                    yield req
3042
3043        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
3044        dm[None].extend(common)
3045
3046        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
3047            s_extra = safe_extra(extra.strip())
3048            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
3049
3050        return dm
3051
3052
3053_distributionImpl = {
3054    '.egg': Distribution,
3055    '.egg-info': EggInfoDistribution,
3056    '.dist-info': DistInfoDistribution,
3057}
3058
3059
3060def issue_warning(*args, **kw):
3061    level = 1
3062    g = globals()
3063    try:
3064        # find the first stack frame that is *not* code in
3065        # the pkg_resources module, to use for the warning
3066        while sys._getframe(level).f_globals is g:
3067            level += 1
3068    except ValueError:
3069        pass
3070    warnings.warn(stacklevel=level + 1, *args, **kw)
3071
3072
3073def parse_requirements(strs):
3074    """
3075    Yield ``Requirement`` objects for each specification in `strs`.
3076
3077    `strs` must be a string, or a (possibly-nested) iterable thereof.
3078    """
3079    return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
3080
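# A small sketch (the requirement strings are illustrative):
#
#   reqs = list(parse_requirements(['demo >= 1.0  # comment dropped', 'other']))
#   [r.project_name for r in reqs]
#   # ['demo', 'other']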
3081
3082class RequirementParseError(packaging.requirements.InvalidRequirement):
3083    "Compatibility wrapper for InvalidRequirement"
3084
3085
3086class Requirement(packaging.requirements.Requirement):
3087    def __init__(self, requirement_string):
3088        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
3089        super().__init__(requirement_string)
3090        self.unsafe_name = self.name
3091        project_name = safe_name(self.name)
3092        self.project_name, self.key = project_name, project_name.lower()
3093        self.specs = [
3094            (spec.operator, spec.version) for spec in self.specifier]
3095        self.extras = tuple(map(safe_extra, self.extras))
3096        self.hashCmp = (
3097            self.key,
3098            self.url,
3099            self.specifier,
3100            frozenset(self.extras),
3101            str(self.marker) if self.marker else None,
3102        )
3103        self.__hash = hash(self.hashCmp)
3104
3105    def __eq__(self, other):
3106        return (
3107            isinstance(other, Requirement) and
3108            self.hashCmp == other.hashCmp
3109        )
3110
3111    def __ne__(self, other):
3112        return not self == other
3113
3114    def __contains__(self, item):
3115        if isinstance(item, Distribution):
3116            if item.key != self.key:
3117                return False
3118
3119            item = item.version
3120
3121        # Allow prereleases always in order to match the previous behavior of
3122        # this method. In the future this should be smarter and follow PEP 440
3123        # more accurately.
3124        return self.specifier.contains(item, prereleases=True)
3125
3126    def __hash__(self):
3127        return self.__hash
3128
3129    def __repr__(self):
3130        return "Requirement.parse(%r)" % str(self)
3131
3132    @staticmethod
3133    def parse(s):
3134        req, = parse_requirements(s)
3135        return req
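
    # Usage sketch (illustrative; the requirement string is hypothetical):
    #
    #   req = Requirement.parse("SomeProject[extra1]>=1.2,<2.0")
    #   req.project_name, req.key     # -> ('SomeProject', 'someproject')
    #   sorted(req.specs)             # -> [('<', '2.0'), ('>=', '1.2')]
    #   '1.5' in req                  # version satisfies the specifier -> True
    #   '2.1' in req                  # -> False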


def _always_object(classes):
    """
    Ensure object appears in the mro even
    for old-style classes.
    """
    if object not in classes:
        return classes + (object,)
    return classes


def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    for t in types:
        if t in registry:
            return registry[t]
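
# Illustrative sketch of the adapter lookup above (the registry and classes
# are hypothetical). ``_find_adapter()`` walks the MRO of ``ob`` and returns
# the first factory registered for one of its classes, so an instance of a
# subclass falls back to the entry registered for its base class:
#
#   class Base: pass
#   class Child(Base): pass
#
#   registry = {Base: lambda ob: 'base-adapter', object: lambda ob: 'default'}
#   _find_adapter(registry, Child())(None)    # -> 'base-adapter'
#   _find_adapter(registry, 42)(None)         # -> 'default'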


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    os.makedirs(dirname, exist_ok=True)


def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        try:
            mkdir(dirname, 0o755)
        except FileExistsError:
            pass


def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content
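
# Usage sketch (illustrative; the input text is hypothetical):
#
#   text = """
#       default = 1
#       [console_scripts]
#       foo = mypkg.cli:main
#   """
#   list(split_sections(text))
#   # -> [(None, ['default = 1']),
#   #     ('console_scripts', ['foo = mypkg.cli:main'])]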


def _mkstemp(*args, **kw):
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open


# Silence the PEP440Warning by default, so that end users don't see it merely
# because they use pkg_resources. Append the rule so that earlier uses of
# filterwarnings take precedence over this one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)


# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    f(*args, **kwargs)
    return f
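
# Illustrative sketch: ``_call_aside`` invokes the decorated function once at
# definition time and leaves the name bound to the original function, which is
# why it is used below for one-shot module initialization (the function here
# is hypothetical):
#
#   @_call_aside
#   def _announce():
#       print('ran once at import time')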


@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )
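
# After ``_initialize`` runs, the public ``ResourceManager`` methods are also
# exposed as module-level functions. Usage sketch (``mypkg`` and its data file
# are hypothetical):
#
#   import pkg_resources
#   raw = pkg_resources.resource_string('mypkg', 'data/defaults.cfg')
#   path = pkg_resources.resource_filename('mypkg', 'data/defaults.cfg')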


class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """
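
# Because PkgResourcesDeprecationWarning is visible by default, downstream
# code that wants to silence it must filter it explicitly. A minimal sketch:
#
#   import warnings
#   warnings.filterwarnings('ignore', category=PkgResourcesDeprecationWarning)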


@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match the order of entries on sys.path
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())

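
# Consumer-side usage sketch of the APIs bound above (the project name and the
# entry-point group are hypothetical):
#
#   import pkg_resources
#   pkg_resources.require('SomeProject>=1.0')        # activate a distribution
#   for ep in pkg_resources.iter_entry_points('myapp.plugins'):
#       plugin = ep.load()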