
/lib/pkg_resources.py

https://bitbucket.org/cistrome/cistrome-harvard/
Python | 2625 lines | 2424 code | 96 blank | 105 comment


   1"""Package resource API
   2--------------------
   3
   4A resource is a logical file contained within a package, or a logical
   5subdirectory thereof.  The package resource API expects resource names
   6to have their path parts separated with ``/``, *not* whatever the local
   7path separator is.  Do not use os.path operations to manipulate resource
   8names being passed into the API.
   9
  10The package resource API is designed to work with normal filesystem packages,
  11.egg files, and unpacked .egg files.  It can also work in a limited way with
  12.zip files and with custom PEP 302 loaders that support the ``get_data()``
  13method.
  14"""
  15
  16import sys, os, zipimport, time, re, imp
  17
  18try:
  19    frozenset
  20except NameError:
  21    from sets import ImmutableSet as frozenset
  22
  23# capture these to bypass sandboxing
  24from os import utime, rename, unlink, mkdir
  25from os import open as os_open
  26from os.path import isdir, split
  27
  28
  29def _bypass_ensure_directory(name, mode=0777):
  30    # Sandbox-bypassing version of ensure_directory()
  31    dirname, filename = split(name)
  32    if dirname and filename and not isdir(dirname):
  33        _bypass_ensure_directory(dirname)
  34        mkdir(dirname, mode)
  35
  36
  37
  38
  39
  40
  41
  42_state_vars = {}
  43
  44def _declare_state(vartype, **kw):
  45    g = globals()
  46    for name, val in kw.iteritems():
  47        g[name] = val
  48        _state_vars[name] = vartype
  49
  50def __getstate__():
  51    state = {}
  52    g = globals()
  53    for k, v in _state_vars.iteritems():
  54        state[k] = g['_sget_'+v](g[k])
  55    return state
  56
  57def __setstate__(state):
  58    g = globals()
  59    for k, v in state.iteritems():
  60        g['_sset_'+_state_vars[k]](k, g[k], v)
  61    return state
  62
  63def _sget_dict(val):
  64    return val.copy()
  65
  66def _sset_dict(key, ob, state):
  67    ob.clear()
  68    ob.update(state)
  69
  70def _sget_object(val):
  71    return val.__getstate__()
  72
  73def _sset_object(key, ob, state):
  74    ob.__setstate__(state)
  75
  76_sget_none = _sset_none = lambda *args: None
  77
  78
  79
  80
  81
  82
  83def get_supported_platform():
  84    """Return this platform's maximum compatible version.
  85
  86    distutils.util.get_platform() normally reports the minimum version
  87    of Mac OS X that would be required to *use* extensions produced by
  88    distutils.  But what we want when checking compatibility is to know the
  89    version of Mac OS X that we are *running*.  To allow usage of packages that
  90    explicitly require a newer version of Mac OS X, we must also know the
  91    current version of the OS.
  92
  93    If this condition occurs for any other platform with a version in its
  94    platform strings, this function should be extended accordingly.
  95    """
  96    plat = get_build_platform(); m = macosVersionString.match(plat)
  97    if m is not None and sys.platform == "darwin":
  98        try:
  99            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
 100        except ValueError:
 101            pass    # not Mac OS X
 102    return plat
 103
 104
 105
 106
 107
 108
 109
 110
 111
 112
 113
 114
 115
 116
 117
 118
 119
 120
 121
 122
 123
 124__all__ = [
 125    # Basic resource access and distribution/entry point discovery
 126    'require', 'run_script', 'get_provider',  'get_distribution',
 127    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
 128    'resource_string', 'resource_stream', 'resource_filename',
 129    'resource_listdir', 'resource_exists', 'resource_isdir',
 130
 131    # Environmental control
 132    'declare_namespace', 'working_set', 'add_activation_listener',
 133    'find_distributions', 'set_extraction_path', 'cleanup_resources',
 134    'get_default_cache',
 135
 136    # Primary implementation classes
 137    'Environment', 'WorkingSet', 'ResourceManager',
 138    'Distribution', 'Requirement', 'EntryPoint',
 139
 140    # Exceptions
 141    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
 142    'ExtractionError',
 143
 144    # Parsing functions and string utilities
 145    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
 146    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
 147    'safe_extra', 'to_filename',
 148
 149    # filesystem utilities
 150    'ensure_directory', 'normalize_path',
 151
 152    # Distribution "precedence" constants
 153    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
 154
 155    # "Provider" interfaces, implementations, and registration/lookup APIs
 156    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
 157    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
 158    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
 159    'register_finder', 'register_namespace_handler', 'register_loader_type',
 160    'fixup_namespace_packages', 'get_importer',
 161
 162    # Deprecated/backward compatibility only
 163    'run_main', 'AvailableDistributions',
 164]
 165class ResolutionError(Exception):
 166    """Abstract base for dependency resolution errors"""
 167    def __repr__(self): return self.__class__.__name__+repr(self.args)
 168
 169class VersionConflict(ResolutionError):
 170    """An already-installed version conflicts with the requested version"""
 171
 172class DistributionNotFound(ResolutionError):
 173    """A requested distribution was not found"""
 174
 175class UnknownExtra(ResolutionError):
 176    """Distribution doesn't have an "extra feature" of the given name"""
 177_provider_factories = {}
 178PY_MAJOR = sys.version[:3]
 179EGG_DIST    = 3
 180BINARY_DIST = 2
 181SOURCE_DIST = 1
 182CHECKOUT_DIST = 0
 183DEVELOP_DIST = -1
 184
 185def register_loader_type(loader_type, provider_factory):
 186    """Register `provider_factory` to make providers for `loader_type`
 187
 188    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
 189    and `provider_factory` is a function that, passed a *module* object,
 190    returns an ``IResourceProvider`` for that module.
 191    """
 192    _provider_factories[loader_type] = provider_factory
 193
 194def get_provider(moduleOrReq):
 195    """Return an IResourceProvider for the named module or requirement"""
 196    if isinstance(moduleOrReq,Requirement):
 197        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
 198    try:
 199        module = sys.modules[moduleOrReq]
 200    except KeyError:
 201        __import__(moduleOrReq)
 202        module = sys.modules[moduleOrReq]
 203    loader = getattr(module, '__loader__', None)
 204    return _find_adapter(_provider_factories, loader)(module)
 205
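# Illustrative sketch (not part of the original source): obtaining a provider
# for an importable package and querying one of its resources.  "mypkg" and
# "data/defaults.cfg" are hypothetical names used only for this example.
#
#     provider = get_provider('mypkg')
#     if provider.has_resource('data/defaults.cfg'):
#         text = provider.get_resource_string(ResourceManager(),
#                                             'data/defaults.cfg')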
 206def _macosx_vers(_cache=[]):
 207    if not _cache:
 208        from platform import mac_ver
 209        _cache.append(mac_ver()[0].split('.'))
 210    return _cache[0]
 211
 212def _macosx_arch(machine):
 213    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
 214
 215def get_build_platform():
 216    """Return this platform's string for platform-specific distributions
 217
 218    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
 219    needs some hacks for Linux and Mac OS X.
 220    """
 221    from distutils.util import get_platform
 222    plat = get_platform()
 223    if sys.platform == "darwin" and not plat.startswith('macosx-'):
 224        try:
 225            version = _macosx_vers()
 226            machine = os.uname()[4].replace(" ", "_")
 227            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
 228                _macosx_arch(machine))
 229        except ValueError:
 230            # if someone is running a non-Mac darwin system, this will fall
 231            # through to the default implementation
 232            pass
 233    return plat
 234
 235macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
 236darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
 237get_platform = get_build_platform   # XXX backward compat
 238
 239
 240
 241
 242
 243
 244
 245
 246
 247def compatible_platforms(provided,required):
 248    """Can code for the `provided` platform run on the `required` platform?
 249
 250    Returns true if either platform is ``None``, or the platforms are equal.
 251
 252    XXX Needs compatibility checks for Linux and other unixy OSes.
 253    """
 254    if provided is None or required is None or provided==required:
 255        return True     # easy case
 256
 257    # Mac OS X special cases
 258    reqMac = macosVersionString.match(required)
 259    if reqMac:
 260        provMac = macosVersionString.match(provided)
 261
 262        # is this a Mac package?
 263        if not provMac:
 264            # this is backwards compatibility for packages built before
 265            # setuptools 0.6. All packages built after this point will
 266            # use the new macosx designation.
 267            provDarwin = darwinVersionString.match(provided)
 268            if provDarwin:
 269                dversion = int(provDarwin.group(1))
 270                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
 271                if dversion == 7 and macosversion >= "10.3" or \
 272                    dversion == 8 and macosversion >= "10.4":
 273
 274                    #import warnings
 275                    #warnings.warn("Mac eggs should be rebuilt to "
 276                    #    "use the macosx designation instead of darwin.",
 277                    #    category=DeprecationWarning)
 278                    return True
 279            return False    # egg isn't macosx or legacy darwin
 280
 281        # are they the same major version and machine type?
 282        if provMac.group(1) != reqMac.group(1) or \
 283            provMac.group(3) != reqMac.group(3):
 284            return False
 285
 286
 287
 288        # is the required OS major update >= the provided one?
 289        if int(provMac.group(2)) > int(reqMac.group(2)):
 290            return False
 291
 292        return True
 293
 294    # XXX Linux and other platforms' special cases should go here
 295    return False
 296
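# Illustrative examples (not part of the original source) of the compatibility
# rules implemented above; each call is compatible_platforms(provided, required):
#
#     >>> compatible_platforms('macosx-10.3-ppc', 'macosx-10.5-ppc')
#     True
#     >>> compatible_platforms('macosx-10.5-ppc', 'macosx-10.3-ppc')
#     False
#     >>> compatible_platforms(None, 'linux-x86_64')
#     True
#
# The first case succeeds because code built for an older OS X minor release
# (same architecture) can run on a newer one; the second fails because the egg
# needs a newer OS X than the target provides; platform-independent code
# (platform ``None``) is always accepted.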
 297
 298def run_script(dist_spec, script_name):
 299    """Locate distribution `dist_spec` and run its `script_name` script"""
 300    ns = sys._getframe(1).f_globals
 301    name = ns['__name__']
 302    ns.clear()
 303    ns['__name__'] = name
 304    require(dist_spec)[0].run_script(script_name, ns)
 305
 306run_main = run_script   # backward compatibility
 307
 308def get_distribution(dist):
 309    """Return a current distribution object for a Requirement or string"""
 310    if isinstance(dist,basestring): dist = Requirement.parse(dist)
 311    if isinstance(dist,Requirement): dist = get_provider(dist)
 312    if not isinstance(dist,Distribution):
 313        raise TypeError("Expected string, Requirement, or Distribution", dist)
 314    return dist
 315
 316def load_entry_point(dist, group, name):
 317    """Return `name` entry point of `group` for `dist` or raise ImportError"""
 318    return get_distribution(dist).load_entry_point(group, name)
 319
 320def get_entry_map(dist, group=None):
 321    """Return the entry point map for `group`, or the full entry map"""
 322    return get_distribution(dist).get_entry_map(group)
 323
 324def get_entry_info(dist, group, name):
 325    """Return the EntryPoint object for `group`+`name`, or ``None``"""
 326    return get_distribution(dist).get_entry_info(group, name)
 327
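# Illustrative sketch (not part of the original source): locating and loading a
# console-script entry point by distribution name.  "MyTool" and "mytool" are
# hypothetical names used only for this example.
#
#     ep = get_entry_info('MyTool', 'console_scripts', 'mytool')
#     if ep is not None:
#         main = load_entry_point('MyTool', 'console_scripts', 'mytool')
#         main()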
 328
 329class IMetadataProvider:
 330
 331    def has_metadata(name):
 332        """Does the package's distribution contain the named metadata?"""
 333
 334    def get_metadata(name):
 335        """The named metadata resource as a string"""
 336
 337    def get_metadata_lines(name):
 338        """Yield named metadata resource as list of non-blank non-comment lines
 339
 340       Leading and trailing whitespace is stripped from each line, and lines
 341       with ``#`` as the first non-blank character are omitted."""
 342
 343    def metadata_isdir(name):
 344        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
 345
 346    def metadata_listdir(name):
 347        """List of metadata names in the directory (like ``os.listdir()``)"""
 348
 349    def run_script(script_name, namespace):
 350        """Execute the named script in the supplied namespace dictionary"""
 351
 352
 353
 354
 355
 356
 357
 358
 359
 360
 361
 362
 363
 364
 365
 366
 367
 368
 369
 370class IResourceProvider(IMetadataProvider):
 371    """An object that provides access to package resources"""
 372
 373    def get_resource_filename(manager, resource_name):
 374        """Return a true filesystem path for `resource_name`
 375
 376        `manager` must be an ``IResourceManager``"""
 377
 378    def get_resource_stream(manager, resource_name):
 379        """Return a readable file-like object for `resource_name`
 380
 381        `manager` must be an ``IResourceManager``"""
 382
 383    def get_resource_string(manager, resource_name):
 384        """Return a string containing the contents of `resource_name`
 385
 386        `manager` must be an ``IResourceManager``"""
 387
 388    def has_resource(resource_name):
 389        """Does the package contain the named resource?"""
 390
 391    def resource_isdir(resource_name):
 392        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
 393
 394    def resource_listdir(resource_name):
 395        """List of resource names in the directory (like ``os.listdir()``)"""
 396
 397
 398
 399
 400
 401
 402
 403
 404
 405
 406
 407
 408
 409
 410
 411class WorkingSet(object):
 412    """A collection of active distributions on sys.path (or a similar list)"""
 413
 414    def __init__(self, entries=None):
 415        """Create working set from list of path entries (default=sys.path)"""
 416        self.entries = []
 417        self.entry_keys = {}
 418        self.by_key = {}
 419        self.callbacks = []
 420
 421        if entries is None:
 422            entries = sys.path
 423
 424        for entry in entries:
 425            self.add_entry(entry)
 426
 427
 428    def add_entry(self, entry):
 429        """Add a path item to ``.entries``, finding any distributions on it
 430
 431        ``find_distributions(entry, True)`` is used to find distributions
 432        corresponding to the path entry, and they are added.  `entry` is
 433        always appended to ``.entries``, even if it is already present.
 434        (This is because ``sys.path`` can contain the same value more than
 435        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
 436        equal ``sys.path``.)
 437        """
 438        self.entry_keys.setdefault(entry, [])
 439        self.entries.append(entry)
 440        for dist in find_distributions(entry, True):
 441            self.add(dist, entry, False)
 442
 443
 444    def __contains__(self,dist):
 445        """True if `dist` is the active distribution for its project"""
 446        return self.by_key.get(dist.key) == dist
 447
 448
 449
 450
 451
 452    def find(self, req):
 453        """Find a distribution matching requirement `req`
 454
 455        If there is an active distribution for the requested project, this
 456        returns it as long as it meets the version requirement specified by
 457        `req`.  But, if there is an active distribution for the project and it
 458        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
 459        If there is no active distribution for the requested project, ``None``
 460        is returned.
 461        """
 462        dist = self.by_key.get(req.key)
 463        if dist is not None and dist not in req:
 464            raise VersionConflict(dist,req)     # XXX add more info
 465        else:
 466            return dist
 467
 468    def iter_entry_points(self, group, name=None):
 469        """Yield entry point objects from `group` matching `name`
 470
 471        If `name` is None, yields all entry points in `group` from all
 472        distributions in the working set, otherwise only ones matching
 473        both `group` and `name` are yielded (in distribution order).
 474        """
 475        for dist in self:
 476            entries = dist.get_entry_map(group)
 477            if name is None:
 478                for ep in entries.values():
 479                    yield ep
 480            elif name in entries:
 481                yield entries[name]
 482
 483    def run_script(self, requires, script_name):
 484        """Locate distribution for `requires` and run `script_name` script"""
 485        ns = sys._getframe(1).f_globals
 486        name = ns['__name__']
 487        ns.clear()
 488        ns['__name__'] = name
 489        self.require(requires)[0].run_script(script_name, ns)
 490
 491
 492
 493    def __iter__(self):
 494        """Yield distributions for non-duplicate projects in the working set
 495
 496        The yield order is the order in which the items' path entries were
 497        added to the working set.
 498        """
 499        seen = {}
 500        for item in self.entries:
 501            for key in self.entry_keys[item]:
 502                if key not in seen:
 503                    seen[key]=1
 504                    yield self.by_key[key]
 505
 506    def add(self, dist, entry=None, insert=True):
 507        """Add `dist` to working set, associated with `entry`
 508
 509        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
 510        On exit from this routine, `entry` is added to the end of the working
 511        set's ``.entries`` (if it wasn't already present).
 512
 513        `dist` is only added to the working set if it's for a project that
 514        doesn't already have a distribution in the set.  If it's added, any
 515        callbacks registered with the ``subscribe()`` method will be called.
 516        """
 517        if insert:
 518            dist.insert_on(self.entries, entry)
 519
 520        if entry is None:
 521            entry = dist.location
 522        keys = self.entry_keys.setdefault(entry,[])
 523        keys2 = self.entry_keys.setdefault(dist.location,[])
 524        if dist.key in self.by_key:
 525            return      # ignore hidden distros
 526
 527        self.by_key[dist.key] = dist
 528        if dist.key not in keys:
 529            keys.append(dist.key)
 530        if dist.key not in keys2:
 531            keys2.append(dist.key)
 532        self._added_new(dist)
 533
 534    def resolve(self, requirements, env=None, installer=None):
 535        """List all distributions needed to (recursively) meet `requirements`
 536
 537        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
 538        if supplied, should be an ``Environment`` instance.  If
 539        not supplied, it defaults to all distributions available within any
 540        entry or distribution in the working set.  `installer`, if supplied,
 541        will be invoked with each requirement that cannot be met by an
 542        already-installed distribution; it should return a ``Distribution`` or
 543        ``None``.
 544        """
 545
 546        requirements = list(requirements)[::-1]  # set up the stack
 547        processed = {}  # set of processed requirements
 548        best = {}  # key -> dist
 549        to_activate = []
 550
 551        while requirements:
 552            req = requirements.pop(0)   # process dependencies breadth-first
 553            if req in processed:
 554                # Ignore cyclic or redundant dependencies
 555                continue
 556            dist = best.get(req.key)
 557            if dist is None:
 558                # Find the best distribution and add it to the map
 559                dist = self.by_key.get(req.key)
 560                if dist is None:
 561                    if env is None:
 562                        env = Environment(self.entries)
 563                    dist = best[req.key] = env.best_match(req, self, installer)
 564                    if dist is None:
 565                        raise DistributionNotFound(req)  # XXX put more info here
 566                to_activate.append(dist)
 567            if dist not in req:
 568                # Oops, the "best" so far conflicts with a dependency
 569                raise VersionConflict(dist,req) # XXX put more info here
 570            requirements.extend(dist.requires(req.extras)[::-1])
 571            processed[req] = True
 572
 573        return to_activate    # return list of distros to activate
 574
 575    def find_plugins(self,
 576        plugin_env, full_env=None, installer=None, fallback=True
 577    ):
 578        """Find all activatable distributions in `plugin_env`
 579
 580        Example usage::
 581
 582            distributions, errors = working_set.find_plugins(
 583                Environment(plugin_dirlist)
 584            )
 585            map(working_set.add, distributions)  # add plugins+libs to sys.path
 586            print "Couldn't load", errors        # display errors
 587
 588        The `plugin_env` should be an ``Environment`` instance that contains
 589        only distributions that are in the project's "plugin directory" or
 590        directories. The `full_env`, if supplied, should be an ``Environment``
  591        that contains all currently-available distributions.  If `full_env` is not
 592        supplied, one is created automatically from the ``WorkingSet`` this
 593        method is called on, which will typically mean that every directory on
 594        ``sys.path`` will be scanned for distributions.
 595
 596        `installer` is a standard installer callback as used by the
 597        ``resolve()`` method. The `fallback` flag indicates whether we should
 598        attempt to resolve older versions of a plugin if the newest version
 599        cannot be resolved.
 600
 601        This method returns a 2-tuple: (`distributions`, `error_info`), where
 602        `distributions` is a list of the distributions found in `plugin_env`
 603        that were loadable, along with any other distributions that are needed
 604        to resolve their dependencies.  `error_info` is a dictionary mapping
 605        unloadable plugin distributions to an exception instance describing the
 606        error that occurred. Usually this will be a ``DistributionNotFound`` or
 607        ``VersionConflict`` instance.
 608        """
 609
 610        plugin_projects = list(plugin_env)
 611        plugin_projects.sort()  # scan project names in alphabetic order
 612
 613        error_info = {}
 614        distributions = {}
 615
 616        if full_env is None:
 617            env = Environment(self.entries)
 618            env += plugin_env
 619        else:
 620            env = full_env + plugin_env
 621
 622        shadow_set = self.__class__([])
 623        map(shadow_set.add, self)   # put all our entries in shadow_set
 624
 625        for project_name in plugin_projects:
 626
 627            for dist in plugin_env[project_name]:
 628
 629                req = [dist.as_requirement()]
 630
 631                try:
 632                    resolvees = shadow_set.resolve(req, env, installer)
 633
 634                except ResolutionError,v:
 635                    error_info[dist] = v    # save error info
 636                    if fallback:
 637                        continue    # try the next older version of project
 638                    else:
 639                        break       # give up on this project, keep going
 640
 641                else:
 642                    map(shadow_set.add, resolvees)
 643                    distributions.update(dict.fromkeys(resolvees))
 644
 645                    # success, no need to try any more versions of this project
 646                    break
 647
 648        distributions = list(distributions)
 649        distributions.sort()
 650
 651        return distributions, error_info
 652
 653
 654
 655
 656
 657    def require(self, *requirements):
 658        """Ensure that distributions matching `requirements` are activated
 659
 660        `requirements` must be a string or a (possibly-nested) sequence
 661        thereof, specifying the distributions and versions required.  The
 662        return value is a sequence of the distributions that needed to be
 663        activated to fulfill the requirements; all relevant distributions are
 664        included, even if they were already activated in this working set.
 665        """
 666        needed = self.resolve(parse_requirements(requirements))
 667
 668        for dist in needed:
 669            self.add(dist)
 670
 671        return needed
 672
 673    def subscribe(self, callback):
 674        """Invoke `callback` for all distributions (including existing ones)"""
 675        if callback in self.callbacks:
 676            return
 677        self.callbacks.append(callback)
 678        for dist in self:
 679            callback(dist)
 680
 681    def _added_new(self, dist):
 682        for callback in self.callbacks:
 683            callback(dist)
 684
 685    def __getstate__(self):
 686        return (
 687            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
 688            self.callbacks[:]
 689        )
 690
 691    def __setstate__(self, (entries, keys, by_key, callbacks)):
 692        self.entries = entries[:]
 693        self.entry_keys = keys.copy()
 694        self.by_key = by_key.copy()
 695        self.callbacks = callbacks[:]
 696
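# Illustrative sketch (not part of the original source): resolving and
# activating a requirement against a working set built from sys.path.
# "FooPlugin" is a hypothetical project name.
#
#     ws = WorkingSet()                          # snapshot of sys.path
#     try:
#         for dist in ws.resolve(parse_requirements("FooPlugin>=1.2")):
#             ws.add(dist)                       # activate and notify subscribers
#     except DistributionNotFound:
#         pass                                   # nothing provides FooPlugin
#
# The one-step equivalent is ``ws.require("FooPlugin>=1.2")``, which resolves
# the requirement and adds every needed distribution in a single call.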
 697
 698class Environment(object):
 699    """Searchable snapshot of distributions on a search path"""
 700
 701    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
 702        """Snapshot distributions available on a search path
 703
 704        Any distributions found on `search_path` are added to the environment.
 705        `search_path` should be a sequence of ``sys.path`` items.  If not
 706        supplied, ``sys.path`` is used.
 707
 708        `platform` is an optional string specifying the name of the platform
 709        that platform-specific distributions must be compatible with.  If
 710        unspecified, it defaults to the current platform.  `python` is an
 711        optional string naming the desired version of Python (e.g. ``'2.4'``);
 712        it defaults to the current version.
 713
 714        You may explicitly set `platform` (and/or `python`) to ``None`` if you
 715        wish to map *all* distributions, not just those compatible with the
 716        running platform or Python version.
 717        """
 718        self._distmap = {}
 719        self._cache = {}
 720        self.platform = platform
 721        self.python = python
 722        self.scan(search_path)
 723
 724    def can_add(self, dist):
 725        """Is distribution `dist` acceptable for this environment?
 726
 727        The distribution must match the platform and python version
 728        requirements specified when this environment was created, or False
 729        is returned.
 730        """
 731        return (self.python is None or dist.py_version is None
 732            or dist.py_version==self.python) \
 733           and compatible_platforms(dist.platform,self.platform)
 734
 735    def remove(self, dist):
 736        """Remove `dist` from the environment"""
 737        self._distmap[dist.key].remove(dist)
 738
 739    def scan(self, search_path=None):
 740        """Scan `search_path` for distributions usable in this environment
 741
 742        Any distributions found are added to the environment.
 743        `search_path` should be a sequence of ``sys.path`` items.  If not
 744        supplied, ``sys.path`` is used.  Only distributions conforming to
 745        the platform/python version defined at initialization are added.
 746        """
 747        if search_path is None:
 748            search_path = sys.path
 749
 750        for item in search_path:
 751            for dist in find_distributions(item):
 752                self.add(dist)
 753
 754    def __getitem__(self,project_name):
 755        """Return a newest-to-oldest list of distributions for `project_name`
 756        """
 757        try:
 758            return self._cache[project_name]
 759        except KeyError:
 760            project_name = project_name.lower()
 761            if project_name not in self._distmap:
 762                return []
 763
 764        if project_name not in self._cache:
 765            dists = self._cache[project_name] = self._distmap[project_name]
 766            _sort_dists(dists)
 767
 768        return self._cache[project_name]
 769
 770    def add(self,dist):
 771        """Add `dist` if we ``can_add()`` it and it isn't already added"""
 772        if self.can_add(dist) and dist.has_version():
 773            dists = self._distmap.setdefault(dist.key,[])
 774            if dist not in dists:
 775                dists.append(dist)
 776                if dist.key in self._cache:
 777                    _sort_dists(self._cache[dist.key])
 778
 779
 780    def best_match(self, req, working_set, installer=None):
 781        """Find distribution best matching `req` and usable on `working_set`
 782
 783        This calls the ``find(req)`` method of the `working_set` to see if a
 784        suitable distribution is already active.  (This may raise
 785        ``VersionConflict`` if an unsuitable version of the project is already
 786        active in the specified `working_set`.)  If a suitable distribution
 787        isn't active, this method returns the newest distribution in the
 788        environment that meets the ``Requirement`` in `req`.  If no suitable
 789        distribution is found, and `installer` is supplied, then the result of
 790        calling the environment's ``obtain(req, installer)`` method will be
 791        returned.
 792        """
 793        dist = working_set.find(req)
 794        if dist is not None:
 795            return dist
 796        for dist in self[req.key]:
 797            if dist in req:
 798                return dist
 799        return self.obtain(req, installer) # try and download/install
 800
 801    def obtain(self, requirement, installer=None):
 802        """Obtain a distribution matching `requirement` (e.g. via download)
 803
 804        Obtain a distro that matches requirement (e.g. via download).  In the
 805        base ``Environment`` class, this routine just returns
 806        ``installer(requirement)``, unless `installer` is None, in which case
 807        None is returned instead.  This method is a hook that allows subclasses
 808        to attempt other ways of obtaining a distribution before falling back
 809        to the `installer` argument."""
 810        if installer is not None:
 811            return installer(requirement)
 812
 813    def __iter__(self):
 814        """Yield the unique project names of the available distributions"""
 815        for key in self._distmap.keys():
 816            if self[key]: yield key
 817
 818
 819
 820
 821    def __iadd__(self, other):
 822        """In-place addition of a distribution or environment"""
 823        if isinstance(other,Distribution):
 824            self.add(other)
 825        elif isinstance(other,Environment):
 826            for project in other:
 827                for dist in other[project]:
 828                    self.add(dist)
 829        else:
 830            raise TypeError("Can't add %r to environment" % (other,))
 831        return self
 832
 833    def __add__(self, other):
 834        """Add an environment or distribution to an environment"""
 835        new = self.__class__([], platform=None, python=None)
 836        for env in self, other:
 837            new += env
 838        return new
 839
 840
 841AvailableDistributions = Environment    # XXX backward compatibility
 842
 843
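# Illustrative sketch (not part of the original source): indexing a directory
# of eggs and picking the newest match for a requirement.  "plugins" and
# "FooPlugin" are hypothetical names used only for this example.
#
#     env = Environment(['plugins'])             # scan ./plugins for eggs
#     for project_name in env:
#         newest = env[project_name][0]          # lists are newest-to-oldest
#     dist = env.best_match(Requirement.parse('FooPlugin>=1.0'), working_set)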
 844class ExtractionError(RuntimeError):
 845    """An error occurred extracting a resource
 846
 847    The following attributes are available from instances of this exception:
 848
 849    manager
 850        The resource manager that raised this exception
 851
 852    cache_path
 853        The base directory for resource extraction
 854
 855    original_error
 856        The exception instance that caused extraction to fail
 857    """
 858
 859
 860
 861
 862class ResourceManager:
 863    """Manage resource extraction and packages"""
 864    extraction_path = None
 865
 866    def __init__(self):
 867        self.cached_files = {}
 868
 869    def resource_exists(self, package_or_requirement, resource_name):
 870        """Does the named resource exist?"""
 871        return get_provider(package_or_requirement).has_resource(resource_name)
 872
 873    def resource_isdir(self, package_or_requirement, resource_name):
 874        """Is the named resource an existing directory?"""
 875        return get_provider(package_or_requirement).resource_isdir(
 876            resource_name
 877        )
 878
 879    def resource_filename(self, package_or_requirement, resource_name):
 880        """Return a true filesystem path for specified resource"""
 881        return get_provider(package_or_requirement).get_resource_filename(
 882            self, resource_name
 883        )
 884
 885    def resource_stream(self, package_or_requirement, resource_name):
 886        """Return a readable file-like object for specified resource"""
 887        return get_provider(package_or_requirement).get_resource_stream(
 888            self, resource_name
 889        )
 890
 891    def resource_string(self, package_or_requirement, resource_name):
 892        """Return specified resource as a string"""
 893        return get_provider(package_or_requirement).get_resource_string(
 894            self, resource_name
 895        )
 896
 897    def resource_listdir(self, package_or_requirement, resource_name):
 898        """List the contents of the named resource directory"""
 899        return get_provider(package_or_requirement).resource_listdir(
 900            resource_name
 901        )
 902
 903    def extraction_error(self):
 904        """Give an error message for problems extracting file(s)"""
 905
 906        old_exc = sys.exc_info()[1]
 907        cache_path = self.extraction_path or get_default_cache()
 908
 909        err = ExtractionError("""Can't extract file(s) to egg cache
 910
 911The following error occurred while trying to extract file(s) to the Python egg
 912cache:
 913
 914  %s
 915
 916The Python egg cache directory is currently set to:
 917
 918  %s
 919
 920Perhaps your account does not have write access to this directory?  You can
 921change the cache directory by setting the PYTHON_EGG_CACHE environment
 922variable to point to an accessible directory.
 923"""         % (old_exc, cache_path)
 924        )
 925        err.manager        = self
 926        err.cache_path     = cache_path
 927        err.original_error = old_exc
 928        raise err
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944    def get_cache_path(self, archive_name, names=()):
 945        """Return absolute location in cache for `archive_name` and `names`
 946
 947        The parent directory of the resulting path will be created if it does
 948        not already exist.  `archive_name` should be the base filename of the
 949        enclosing egg (which may not be the name of the enclosing zipfile!),
 950        including its ".egg" extension.  `names`, if provided, should be a
 951        sequence of path name parts "under" the egg's extraction location.
 952
 953        This method should only be called by resource providers that need to
 954        obtain an extraction location, and only for names they intend to
 955        extract, as it tracks the generated names for possible cleanup later.
 956        """
 957        extract_path = self.extraction_path or get_default_cache()
 958        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
 959        try:
 960            _bypass_ensure_directory(target_path)
 961        except:
 962            self.extraction_error()
 963
 964        self.cached_files[target_path] = 1
 965        return target_path
 966
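    # Illustrative sketch (not part of the original source): a resource
    # provider requesting an extraction location in the cache.  The egg and
    # resource names are hypothetical.
    #
    #     target = manager.get_cache_path('Example-1.0-py2.4.egg',
    #                                     ['images', 'logo.png'])
    #     # -> <cache dir>/Example-1.0-py2.4.egg-tmp/images/logo.png, with the
    #     #    parent directory created if it did not already exist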
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985    def postprocess(self, tempname, filename):
 986        """Perform any platform-specific postprocessing of `tempname`
 987
 988        This is where Mac header rewrites should be done; other platforms don't
 989        have anything special they should do.
 990
 991        Resource providers should call this method ONLY after successfully
 992        extracting a compressed resource.  They must NOT call it on resources
 993        that are already in the filesystem.
 994
 995        `tempname` is the current (temporary) name of the file, and `filename`
 996        is the name it will be renamed to by the caller after this routine
 997        returns.
 998        """
 999
1000        if os.name == 'posix':
1001            # Make the resource executable
1002            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
1003            os.chmod(tempname, mode)
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026    def set_extraction_path(self, path):
1027        """Set the base path where resources will be extracted to, if needed.
1028
1029        If you do not call this routine before any extractions take place, the
1030        path defaults to the return value of ``get_default_cache()``.  (Which
1031        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1032        platform-specific fallbacks.  See that routine's documentation for more
1033        details.)
1034
1035        Resources are extracted to subdirectories of this path based upon
1036        information given by the ``IResourceProvider``.  You may set this to a
1037        temporary directory, but then you must call ``cleanup_resources()`` to
1038        delete the extracted files when done.  There is no guarantee that
1039        ``cleanup_resources()`` will be able to remove all extracted files.
1040
1041        (Note: you may not change the extraction path for a given resource
1042        manager once resources have been extracted, unless you first call
1043        ``cleanup_resources()``.)
1044        """
1045        if self.cached_files:
1046            raise ValueError(
1047                "Can't change extraction path, files already extracted"
1048            )
1049
1050        self.extraction_path = path
1051
1052    def cleanup_resources(self, force=False):
1053        """
1054        Delete all extracted resource files and directories, returning a list
1055        of the file and directory names that could not be successfully removed.
1056        This function does not have any concurrency protection, so it should
1057        generally only be called when the extraction path is a temporary
1058        directory exclusive to a single process.  This method is not
1059        automatically called; you must call it explicitly or register it as an
1060        ``atexit`` function if you wish to ensure cleanup of a temporary
1061        directory used for extractions.
1062        """
1063        # XXX
1064
1065
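# Illustrative sketch (not part of the original source): client code normally
# uses the module-level convenience functions listed in __all__, which are
# backed by a shared ResourceManager instance set up further down in this file
# (beyond the truncation point).  "mypkg" and "templates/page.html" are
# hypothetical names used only for this example.
#
#     import pkg_resources
#     html = pkg_resources.resource_string('mypkg', 'templates/page.html')
#     path = pkg_resources.resource_filename('mypkg', 'templates/page.html')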
1066
1067def get_default_cache():
1068    """Determine the default cache location
1069
1070    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
1071    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
1072    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
1073    """
1074    try:
1075        return os.environ['PYTHON_EGG_CACHE']
1076    except KeyError:
1077        pass
1078
1079    if os.name!='nt':
1080        return os.path.expanduser('~/.python-eggs')
1081
1082    app_data = 'Application Data'   # XXX this may be locale-specific!
1083    app_homes = [
1084        (('APPDATA',), None),       # best option, should be locale-safe
1085        (('USERPROFILE',), app_data),
1086        (('HOMEDRIVE','HOMEPATH'), app_data),
1087        (('HOMEPATH',), app_data),
1088        (('HOME',), None),
1089        (('WINDIR',), app_data),    # 95/98/ME
1090    ]
1091
1092    for keys, subdir in app_homes:
1093        dirname = ''
1094        for key in keys:
1095            if key in os.environ:
1096                dirname = os.path.join(dirname, os.environ[key])
1097            else:
1098                break
1099        else:
1100            if subdir:
1101                dirname = os.path.join(dirname,subdir)
1102            return os.path.join(dirname, 'Python-Eggs')
1103    else:
1104        raise RuntimeError(
 1105            "Please set the PYTHON_EGG_CACHE environment variable"
1106        )
1107
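# Illustrative example (not part of the original source): redirecting the egg
# extraction cache to a writable location before any extraction happens.
# "/tmp/egg-cache" is a hypothetical path.
#
#     import os
#     os.environ['PYTHON_EGG_CACHE'] = '/tmp/egg-cache'
#
# or, equivalently, from the shell:  PYTHON_EGG_CACHE=/tmp/egg-cache python app.py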
1108def safe_name(name):
1109    """Convert an arbitrary string to a standard distribution name
1110
1111    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
1112    """
1113    return re.sub('[^A-Za-z0-9.]+', '-', name)
1114
1115
1116def safe_version(version):
1117    """Convert an arbitrary string to a standard version string
1118
1119    Spaces become dots, and all other non-alphanumeric characters become
1120    dashes, with runs of multiple dashes condensed to a single dash.
1121    """
1122    version = version.replace(' ','.')
1123    return re.sub('[^A-Za-z0-9.]+', '-', version)
1124
1125
1126def safe_extra(extra):
1127    """Convert an arbitrary string to a standard 'extra' name
1128
1129    Any runs of non-alphanumeric characters are replaced with a single '_',
1130    and the result is always lowercased.
1131    """
1132    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
1133
1134
1135def to_filename(name):
1136    """Convert a project or version name to its filename-escaped form
1137
1138    Any '-' characters are currently replaced with '_'.
1139    """
1140    return name.replace('-','_')
1141
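# Illustrative examples (not part of the original source) of the normalization
# helpers defined above:
#
#     >>> safe_name('My Project?!')
#     'My-Project-'
#     >>> safe_version('1.0 beta 2')
#     '1.0.beta.2'
#     >>> safe_extra('Foo Bar')
#     'foo_bar'
#     >>> to_filename('my-project')
#     'my_project'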
1142
1143
1144
1145
1146
1147
1148
1149class NullProvider:
1150    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1151
1152    egg_name = None
1153    egg_info = None
1154    loader = None
1155
1156    def __init__(self, module):
1157        self.loader = getattr(module, '__loader__', None)
1158        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1159
1160    def get_resource_filename(self, manager, resource_name):
1161        return self._fn(self.module_path, resource_name)
1162
1163    def get_resource_stream(self, manager, resource_name):
1164        return StringIO(self.get_resource_string(manager, resource_name))
1165
1166    def get_resource_string(self, manager, resource_name):
1167        return self._get(self._fn(self.module_path, resource_name))
1168
1169    def has_resource(self, resource_name):
1170        return self._has(self._fn(self.module_path, resource_name))
1171
1172    def has_metadata(self, name):
1173        return self.egg_info and self._has(self._fn(self.egg_info,name))
1174
1175    def get_metadata(self, name):
1176        if not self.egg_info:
1177            return ""
1178        return self._get(self._fn(self.egg_info,name))
1179
1180    def get_metadata_lines(self, name):
1181        return yield_lines(self.get_metadata(name))
1182
1183    def resource_isdir(self,resource_name):
1184        return self._isdir(self._fn(self.module_path, resource_name))
1185
1186    def metadata_isdir(self,name):
1187        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
1188
1189
1190    def resource_listdir(self,resource_name):
1191        return self._listdir(self._fn(self.module_path,resource_name))
1192
1193    def metadata_listdir(self,name):
1194        if self.egg_info:
1195            return self._listdir(self._fn(self.egg_info,name))
1196        return []
1197
1198    def run_script(self,script_name,namespace):
1199        script = 'scripts/'+script_name
1200        if not self.has_metadata(script):
1201            raise ResolutionError("No script named %r" % script_name)
1202        script_text = self.get_metadata(script).replace('\r\n','\n')
1203        script_text = script_text.replace('\r','\n')
1204        script_filename = self._fn(self.egg_info,script)
1205        namespace['__file__'] = script_filename
1206        if os.path.exists(script_filename):
1207            execfile(script_filename, namespace, namespace)
1208        else:
1209            from linecache import cache
1210            cache[script_filename] = (
1211                len(script_text), 0, script_text.split('\n'), script_filename
1212            )
1213            script_code = compile(script_text,script_filename,'exec')
1214            exec script_code in namespace, namespace
1215
1216    def _has(self, path):
1217        raise NotImplementedError(
1218            "Can't perform this operation for unregistered loader type"
1219        )
1220
1221    def _isdir(self, path):
1222        raise NotImplementedError(
1223            "Can't perform this operation for unregistered loader type"
1224        )
1225
1226    def _listdir(self, path):
1227        raise NotImplementedError(
1228            "Can't perform this operation for unregistered loader type"
1229        )
1230
1231    def _fn(self, base, resource_name):
1232        if resource_name:
1233            return os.path.join(base, *resource_name.split('/'))
1234        return base
1235
1236    def _get(self, path):
1237        if hasattr(self.loader, 'get_data'):
1238            return self.loader.get_data(path)
1239        raise NotImplementedError(
1240            "Can't perform this operation for loaders without 'get_data()'"
1241        )
1242
1243register_loader_type(object, NullProvider)
1244
1245
1246class EggProvider(NullProvider):
1247    """Provider based on a virtual filesystem"""
1248
1249    def __init__(self,module):
1250        NullProvider.__init__(self,module)
1251        self._setup_prefix()
1252
1253    def _setup_prefix(self):
1254        # we assume here that our metadata may be nested inside a "basket"
1255        # of multiple eggs; that's why we use module_path instead of .archive
1256        path = self.module_path
1257        old = None
1258        while path!=old:
1259            if path.lower().endswith('.egg'):
1260                self.egg_name = os.path.basename(path)
1261                self.egg_info = os.path.join(path, 'EGG-INFO')
1262                self.egg_root = path
1263                break
1264            old = path
1265            path, base = os.path.split(path)
1266
1267
1268
1269
1270
1271
1272class DefaultProvider(EggProvider):
1273    """Provides access to package resources in the filesystem"""
1274
1275    def _has(self, path):
1276        return os.path.exists(path)
1277
1278    def _isdir(self,path):
1279        return os.path.isdir(path)
1280
1281    def _listdir(self,path):
1282        return os.listdir(path)
1283
1284    def get_resource_stream(self, manager, resource_name):
1285        return open(self._fn(self.module_path, resource_name), 'rb')
1286
1287    def _get(self, path):
1288        stream = open(path, 'rb')
1289        try:
1290            return stream.read()
1291        finally:
1292            stream.close()
1293
1294register_loader_type(type(None), DefaultProvider)
1295
1296
1297class EmptyProvider(NullProvider):
1298    """Provider that returns nothing for all requests"""
1299
1300    _isdir = _has = lambda self,path: False
1301    _get          = lambda self,path: ''
1302    _listdir      = lambda self,path: []
1303    module_path   = None
1304
1305    def __init__(self):
1306        pass
1307
1308empty_provider = EmptyProvider()
1309
1310
1311
1312
1313class ZipProvider(EggProvider):
1314    """Resource support for zips and eggs"""
1315
1316    eagers = None
1317
1318    def __init__(self, module):
1319        EggProvider.__init__(self,module)
1320        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
1321        self.zip_pre = self.loader.archive+os.sep
1322
1323    def _zipinfo_name(self, fspath):
1324        # Convert a virtual filename (full path to file) into a zipfile subpath
1325        # usable with the zipimport directory cache for our target archive
1326        if fspath.startswith(self.zip_pre):
1327            return fspath[len(self.zip_pre):]
1328        raise AssertionError(
1329            "%s is not a subpath of %s" % (fspath,self.zip_pre)
1330        )
1331
1332    def _parts(self,zip_path):
1333        # Convert a zipfile subpath into an egg-relative path part list
1334        fspath = self.zip_pre+zip_path  # pseudo-fs path
1335        if fspath.startswith(self.egg_root+os.sep):
1336            return fspath[len(self.egg_root)+1:].split(os.sep)
1337        raise AssertionError(
1338            "%s is not a subpath of %s" % (fspath,self.egg_root)
1339        )
1340
1341    def get_resource_filename(self, manager, resource_name):
1342        if not self.egg_name:
1343            raise NotImplementedError(
1344                "resource_filename() only supported for .egg, not .zip"
1345            )
1346        # no need to lock for extraction, since we use temp names
1347        zip_path = self._resource_to_zip(resource_name)
1348        eagers = self._get_eager_resources()
1349        if '/'.join(self._parts(zip_path)) in eagers:
1350            for name in eagers:
1351                self._extract_resource(manager, self._eager_to_zip(name))
1352        return self._extract_resource(manager, zip_path)
1353
1354    def _extract_resource(self, manager, zip_path):
1355
1356        if zip_path in self._index():
1357            for name in self._index()[zip_path]:
1358                last = self._extract_resource(
1359                    manager, os.path.join(zip_path, name)
1360                )
1361            return os.path.dirname(last)  # return the extracted directory name
1362
1363        zip_stat = self.zipinfo[zip_path]
1364        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
1365        date_time = (
1366            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
1367            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
1368        )
1369        timestamp = time.mktime(date_time)
1370
1371        try:
1372            real_path = manager.get_cache_path(
1373                self.egg_name, self._parts(zip_path)
1374            )
1375
1376            if os.path.isfile(real_path):
1377                stat = os.stat(real_path)
1378                if stat.st_size==size and stat.st_mtime==timestamp:
1379                    # size and stamp match, don't bother extracting
1380                    return real_path
1381
1382            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
1383            os.write(outf, self.loader.get_data(zip_path))
1384            os.close(outf)
1385            utime(tmpnam, (timestamp,timestamp))
1386            manager.postprocess(tmpnam, real_path)
1387
1388            try:
1389                rename(tmpnam, real_path)
1390
1391            except os.error:
1392                if os.path.isfile(real_path):
1393                    stat = os.stat(real_path)
1394
1395                    if stat.st_size==size and stat.st_mtime==timestamp:
1396                        # size and stamp match, somebody did it just ahead of
1397                        # us, so we're done
1398                        return real_path
1399                    elif os.name=='nt':     # Windows, del old file and retry
1400                        unlink(real_path)
1401                        rename(tmpnam, real_path)
1402                        return real_path
1403                raise
1404
1405        except os.error:
1406            manager.extraction_error()  # report a user-friendly error
1407
1408        return real_path
1409
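    # Worked example (not part of the original source) of the DOS date/time
    # decoding used by _extract_resource() above.  For a zip entry stamped
    # 2009-06-15 12:30:08 the packed words are d = 15055 and t = 25540, and:
    #
    #     (d >> 9) + 1980     ->  2009 (year)      (d >> 5) & 0xF     ->   6 (month)
    #     d & 0x1F            ->    15 (day)       (t & 0xFFFF) >> 11 ->  12 (hour)
    #     (t >> 5) & 0x3F     ->    30 (minute)    (t & 0x1F) * 2     ->   8 (seconds)
    #
    # time.mktime() then converts that tuple into the timestamp compared with
    # the extracted file's st_mtime.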
1410    def _get_eager_resources(self):
1411        if self.eagers is None:
1412            eagers = []
1413            for name in ('native_libs.txt', 'eager_resources.txt'):
1414                if self.has_metadata(name):
1415                    eagers.extend(self.get_metadata_lines(name))
1416            self.eagers = eagers
1417        return self.eagers
1418
1419    def _index(self):
1420        try:
1421            return self._dirindex
1422        except AttributeError:
1423            ind = {}
1424            for path in self.zipinfo:
1425                parts = path.split(os.sep)
1426                while parts:
1427                    parent = os.sep.join(parts[:-1])
1428                    if parent in ind:
1429                        ind[parent].append(parts[-1])
1430                        break
1431                    else:
1432                        ind[parent] = [parts.pop()]
1433            self._dirindex = ind
1434            return ind
1435
1436    def _has(self, fspath):
1437        zip_path = self._zipinfo_name(fspath)
1438        return zip_path in self.zipinfo or zip_path in self._index()
1439
1440    def _isdir(self,fspath):
1441        return self._zipinfo_name(fspath) in self._index()
1442
1443    def _listdir(self,fspath):
1444        return list(self._index().get(self._zipinfo_name(fspath), ()))
1445
1446    def _eager_to_zip(self,resource_name):
1447        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
1448
1449    def _resource_to_zip(self,resource_name):
1450        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1451
1452register_loader_type(zipimport.zipimporter, ZipProvider)
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477class FileMetadata(EmptyProvider):
1478    """Metadata handler for standalone PKG-INFO files
1479
1480    Usage::
1481
1482        metadata = FileMetadata("/path/to/PKG-INFO")
1483
1484    This provider rejects all data and metadata requests except for PKG-INFO,
1485    which is treated as existing, and will be the contents of the file at
1486    the provided location.
1487    """
1488
1489    def __init__(self,path):
1490        self.path = path
1491
1492    def has_metadata(self,name):
1493        return name=='PKG-INFO'
1494
1495    def get_metadata(self,name):
1496        if name=='PKG-INFO':
1497            return open(self.path,'rU').read()
1498        raise KeyError("No metadata except PKG-INFO is available")
1499
1500    def get_metadata_lines(self,name):
1501        return yield_lines(self.get_metadata(name))
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518class PathMetadata(DefaultProvider):
1519    """Metadata provider for egg directories
1520
1521    Usage::
1522
1523        # Development eggs:
1524
1525        egg_info = "/path/to/PackageName.egg-info"
1526        base_dir = os.path.dirname(egg_info)
1527        metadata = PathMetadata(base_dir, egg_info)
1528        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
 1529        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
1530
1531        # Unpacked egg directories:
1532
1533        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
1534        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
1535        dist = Distribution.from_filename(egg_path, metadata=metadata)
1536    """
1537
1538    def __init__(self, path, egg_info):
1539        self.module_path = path
1540        self.egg_info = egg_info
1541
1542
1543class EggMetadata(ZipProvider):
1544    """Metadata provider for .egg files"""
1545
1546    def __init__(self, importer):
1547        """Create a metadata provider from a zipimporter"""
1548
1549        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
1550        self.zip_pre = importer.archive+os.sep
1551        self.loader = importer
1552        if importer.prefix:
1553            self.modul

(File truncated here; the full source is available at the repository URL above.)