Project

General

Profile

Statistics
| Branch: | Revision:

root / env / lib / python2.7 / site-packages / distribute-0.6.19-py2.7.egg / pkg_resources.py @ 1a305335

History | View | Annotate | Download (87.8 KB)

1 1a305335 officers
"""Package resource API
2
--------------------
3

4
A resource is a logical file contained within a package, or a logical
5
subdirectory thereof.  The package resource API expects resource names
6
to have their path parts separated with ``/``, *not* whatever the local
7
path separator is.  Do not use os.path operations to manipulate resource
8
names being passed into the API.
9

10
The package resource API is designed to work with normal filesystem packages,
11
.egg files, and unpacked .egg files.  It can also work in a limited way with
12
.zip files and with custom PEP 302 loaders that support the ``get_data()``
13
method.
14
"""
15
16
import sys, os, zipimport, time, re, imp, types
17
from urlparse import urlparse, urlunparse
18
19
try:
20
    frozenset
21
except NameError:
22
    from sets import ImmutableSet as frozenset
23
24
# capture these to bypass sandboxing
25
from os import utime
26
try:
27
    from os import mkdir, rename, unlink
28
    WRITE_SUPPORT = True
29
except ImportError:
30
    # no write support, probably under GAE
31
    WRITE_SUPPORT = False
32
33
from os import open as os_open
34
from os.path import isdir, split
35
36
# This marker is used to simplify the process that checks is the
37
# setuptools package was installed by the Setuptools project
38
# or by the Distribute project, in case Setuptools creates
39
# a distribution with the same version.
40
#
41
# The bootstrapping script for instance, will check if this
42
# attribute is present to decide wether to reinstall the package
43
_distribute = True
44
45
def _bypass_ensure_directory(name, mode=0777):
    # Sandbox-bypassing version of ensure_directory(): create the parent
    # directory chain of `name` using the mkdir captured from ``os`` above
    # (before any sandboxing could replace it).  Raises IOError when the
    # platform offers no write support (e.g. GAE, per WRITE_SUPPORT above).
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(name)
    if dirname and filename and not isdir(dirname):
        # recurse first so all ancestors exist before mkdir(dirname)
        _bypass_ensure_directory(dirname)
        mkdir(dirname, mode)
53
54
55
# Registry of module-level state variables: name -> vartype ('dict',
# 'object', or 'none').  Consumed by the module-level __getstate__ /
# __setstate__ below via the _sget_*/_sset_* helper pairs.
_state_vars = {}

def _declare_state(vartype, **kw):
    # Install each name=value pair as a module global and record its
    # vartype so its state can be snapshotted and restored later.
    g = globals()
    for name, val in kw.iteritems():
        g[name] = val
        _state_vars[name] = vartype
62
63
def __getstate__():
    # Snapshot all registered module-level state into a plain dict,
    # dispatching on each variable's vartype to the matching _sget_* helper.
    state = {}
    g = globals()
    for k, v in _state_vars.iteritems():
        state[k] = g['_sget_'+v](g[k])
    return state
69
70
def __setstate__(state):
    # Restore module-level state captured by __getstate__(), dispatching
    # on the registered vartype to the matching _sset_* helper.  Returns
    # the input state dict unchanged.
    g = globals()
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)
    return state
75
76
def _sget_dict(val):
77
    return val.copy()
78
79
def _sset_dict(key, ob, state):
80
    ob.clear()
81
    ob.update(state)
82
83
def _sget_object(val):
84
    return val.__getstate__()
85
86
def _sset_object(key, ob, state):
87
    ob.__setstate__(state)
88
89
# No-op getter/setter pair for vartype 'none': such variables are neither
# captured nor restored across __getstate__/__setstate__.  Both names are
# deliberately bound to the same callable.
_sget_none = _sset_none = lambda *args: None
90
91
92
93
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    match = macosVersionString.match(plat)
    if match is None or sys.platform != "darwin":
        # Not a macosx-style platform string: report it unchanged.
        return plat
    try:
        # Substitute the *running* OS version for the build-time minimum.
        release = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (release, match.group(3))
    except ValueError:
        pass    # not Mac OS X
    return plat
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
# Public names exported by ``from pkg_resources import *``; several of the
# listed names are defined later in the file, past this chunk.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider',  'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
    'ExtractionError',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
175
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. ResolutionError('x') -> "ResolutionError('x',)"
        return "%s%r" % (self.__class__.__name__, self.args)
179
180
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
    # Raised with (dist, req) args — see WorkingSet.find()/resolve().
182
183
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
    # Raised with the unmet Requirement as its sole arg; zc.buildout relies
    # on str(err) naming the distribution (see WorkingSet.resolve()).
185
186
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
188
_provider_factories = {}
189
190
PY_MAJOR = sys.version[:3]
191
EGG_DIST    = 3
192
BINARY_DIST = 2
193
SOURCE_DIST = 1
194
CHECKOUT_DIST = 0
195
DEVELOP_DIST = -1
196
197
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Consumed by get_provider(), which adapts a module's __loader__ via
    # _find_adapter(_provider_factories, loader).
    _provider_factories[loader_type] = provider_factory
205
206
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # A Requirement resolves to its active (or newly activated) dist
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # Named module: import it on first use, then adapt its loader.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)
217
218
def _macosx_vers(_cache=[]):
    # Return the running Mac OS X version as a list of string components
    # (e.g. ['10', '6', '8']).  The mutable default `_cache` is deliberate:
    # it memoizes the result across calls — do not pass this argument.
    if not _cache:
        import platform
        version = platform.mac_ver()[0]
        # fallback for MacPorts builds of Python, where mac_ver() is empty
        if version == '':
            import plistlib
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']

        _cache.append(version.split('.'))
    return _cache[0]
234
235
def _macosx_arch(machine):
236
    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
237
238
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        from distutils.util import get_platform
    except ImportError:
        # distutils may be absent (stripped-down installs); sysconfig
        # provides the same function.
        from sysconfig import get_platform

    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Rewrite bare darwin platform strings into the macosx-X.Y-arch form
        # using the running OS version and normalized machine name.
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
261
262
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
263
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
264
get_platform = get_build_platform   # XXX backward compat
265
266
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        return True     # easy case

    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    # Mac OS X special cases
    provMac = macosVersionString.match(provided)
    if not provMac:
        # Not a macosx egg: backwards compatibility for packages built
        # before setuptools 0.6, which used the "darwin-..." designation.
        provDarwin = darwinVersionString.match(provided)
        if provDarwin:
            dversion = int(provDarwin.group(1))
            macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
            if dversion == 7 and macosversion >= "10.3" or \
                dversion == 8 and macosversion >= "10.4":
                return True
        return False    # egg isn't macosx or legacy darwin

    # same major version and machine type required
    if provMac.group(1) != reqMac.group(1) or \
        provMac.group(3) != reqMac.group(3):
        return False

    # the required OS minor update must be >= the provided one
    if int(provMac.group(2)) > int(reqMac.group(2)):
        return False

    return True
315
316
317
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # The *caller's* module globals are cleared and reduced to just
    # __name__, so the script executes in a fresh namespace.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script   # backward compatibility
326
327
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize in two steps: string -> Requirement -> Distribution.
    if isinstance(dist, basestring):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
334
335
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
338
339
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
342
343
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
346
347
348
class IMetadataProvider:
    # Documentation-only "interface" in the historical setuptools style:
    # methods are declared without bodies or `self`; concrete providers
    # implement them.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
370
371
372
373
374
375
376
377
378
379
380
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Documentation-only interface (see IMetadataProvider): resource names
    # use '/' separators regardless of the local OS, per the module docstring.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # .entries: ordered path entries; .entry_keys: entry -> list of
        # project keys found on it; .by_key: project key -> active
        # Distribution; .callbacks: subscribers notified via _added_new().
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)


    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry,True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)


    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist


    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req)     # XXX add more info
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Clears the *caller's* module globals down to just __name__ so the
        # script executes in a fresh namespace.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)


    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set.  If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        # record the dist's key under both the given entry and its location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return      # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None, replacement=True):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """

        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}  # key -> dist
        to_activate = []

        while requirements:
            req = requirements.pop(0)   # process dependencies breadth-first
            # distribute-specific: substitute itself when a requirement asks
            # for setuptools, unless the caller disabled `replacement`
            if _override_setuptools(req) and replacement:
                req = Requirement.parse('distribute')

            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        #msg = ("The '%s' distribution was not found on this "
                        #       "system, and is required by this application.")
                        #raise DistributionNotFound(msg % req)

                        # unfortunately, zc.buildout uses a str(err)
                        # to get the name of the distribution here..
                        raise DistributionNotFound(req)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist,req) # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate    # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print 'Could not load', errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a shadow copy so failed plugins don't pollute self
        shadow_set = self.__class__([])
        map(shadow_set.add, self)   # put all our entries in shadow_set

        for project_name in plugin_projects:

            # plugin_env[project_name] is newest-to-oldest, so the first
            # resolvable version wins
            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError,v:
                    error_info[dist] = v    # save error info
                    if fallback:
                        continue    # try the next older version of project
                    else:
                        break       # give up on this project, keep going

                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info


    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """

        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed


    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)


    def _added_new(self, dist):
        # Notify every subscriber that `dist` was just activated.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Shallow-copy all mutable containers so the pickled state cannot
        # alias this working set's live internal state.
        return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
                self.callbacks[:])

    def __setstate__(self, (entries, keys, by_key, callbacks)):
        # Python 2 tuple-parameter unpacking (removed in Py3 by PEP 3113).
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
716
717
718
719
720
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE: the `platform` default is evaluated once, at class-definition
        # time (standard default-argument behavior).
        # _distmap: project key -> list of Distributions;
        # _cache: project key -> same list, sorted newest-to-oldest.
        self._distmap = {}
        self._cache = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
           and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            # fast path: already cached (and sorted) under this exact name
            return self._cache[project_name]
        except KeyError:
            # keys in _distmap are lowercased project names
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            # cache the _distmap list itself and sort it in place
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self,dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                dists.append(dist)
                # keep the cached list sorted if it already exists
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])


    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # skip keys whose distribution list is empty (e.g. after remove())
            if self[key]: yield key


    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            self.add(other)
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # the new environment is unrestricted (platform=python=None) so it
        # can hold everything from both operands
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
861
862
863
# Old name for Environment, kept for code written against early setuptools.
AvailableDistributions = Environment    # XXX backward compatibility
864
865
866
class ExtractionError(RuntimeError):
    """Raised when a resource cannot be extracted to the egg cache.

    Instances of this exception expose the following attributes:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
880
881
882
883
884
class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Maps each extraction target path -> 1; used to track what was
        # extracted (and to refuse changing the extraction path afterwards).
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Wrap whatever exception is currently being handled in an
        # ExtractionError that tells the user how to fix the problem
        # (typically: point PYTHON_EGG_CACHE at a writable directory).
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory?  You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
"""         % (old_exc, cache_path)
        )
        # Attach context so callers can inspect what failed and where.
        err.manager        = self
        err.cache_path     = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except:
            # Deliberately broad: any failure creating the directory is
            # converted into a user-friendly ExtractionError (which re-reads
            # the in-flight exception via sys.exc_info()).
            self.extraction_error()

        # Remember the path so cleanup_resources()/set_extraction_path()
        # know extraction has happened.
        self.cached_files[target_path] = 1
        return target_path

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented -- extracted files are currently left in place.
1086
1087
1088
1089
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".

    Raises RuntimeError on Windows when no suitable home directory can be
    derived from the environment.
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'   # XXX this may be locale-specific!
    # Candidate (env-var combo, subdirectory) pairs, best first.
    app_homes = [
        (('APPDATA',), None),       # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),    # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            # All env vars for this candidate were present -- use it.
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        # Bug fix: the original message misspelled "environment".
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
1129
1130
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Each run of characters other than letters, digits, and '.' is
    collapsed into a single '-'.
    """
    non_alphanum = '[^A-Za-z0-9.]+'
    return re.sub(non_alphanum, '-', name)
1136
1137
1138
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots; every remaining run of characters other than
    letters, digits, and '.' becomes a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
1146
1147
1148
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Runs of characters other than letters, digits, and '.' are replaced
    with a single '_', and the result is always lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return underscored.lower()
1155
1156
1157
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1163
1164
1165
1166
1167
1168
1169
1170
1171
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by EggProvider subclasses when the module lives in an egg.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # Resources are resolved relative to the directory containing the
        # module's __file__; data reads go through the module's PEP 302 loader.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # Only meaningful when an egg-info location is known.
        return self.egg_info and self._has(self._fn(self.egg_info,name))

    # Metadata is returned as a native str: raw bytes on Python 2,
    # UTF-8-decoded text on Python 3.
    if sys.version_info <= (3,):
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info,name))
    else:
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info,name)).decode("utf-8")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self,resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self,name):
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))

    def resource_listdir(self,resource_name):
        return self._listdir(self._fn(self.module_path,resource_name))

    def metadata_listdir(self,name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info,name))
        return []

    def run_script(self,script_name,namespace):
        """Execute the named metadata script in `namespace` (Python 2 only:
        uses execfile / the `exec ... in` statement)."""
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            execfile(script_filename, namespace, namespace)
        else:
            # Script exists only inside a zip: seed linecache so tracebacks
            # can still show source lines, then compile and exec from memory.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text,script_filename,'exec')
            exec script_code in namespace, namespace

    # The _has/_isdir/_listdir primitives must be supplied by a provider
    # registered for the concrete loader type; the base class cannot
    # inspect an arbitrary loader's namespace.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Map a '/'-separated resource name onto the local filesystem path.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        # PEP 302 optional extension: loaders may expose get_data().
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1270
1271
register_loader_type(object, NullProvider)
1272
1273
1274
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        """Find the enclosing ``.egg`` directory, if any, and record its
        name, EGG-INFO path, and root.

        The metadata may be nested inside a "basket" of multiple eggs,
        which is why the search starts from ``module_path`` rather than
        from ``.archive``.
        """
        path = self.module_path
        previous = None
        # Walk upwards until os.path.split stops making progress.
        while path != previous:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            previous = path
            path, _unused = os.path.split(path)
1294
1295
1296
1297
1298
1299
1300
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Binary mode: resource bytes are returned unmodified.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        """Read the whole file at `path` in binary mode, always closing it."""
        stream = open(path, 'rb')
        try:
            data = stream.read()
        finally:
            stream.close()
        return data
1321
1322
register_loader_type(type(None), DefaultProvider)
1323
1324
1325
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module here.
        pass
1335
1336
empty_provider = EmptyProvider()
1337
1338
1339
1340
1341
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Cached list of eagerly-extracted resource names (see
    # _get_eager_resources); None until first computed.
    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # zipimport keeps a per-archive directory cache; reuse it so we can
        # stat zip entries without reopening the archive.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path  # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the egg cache (if needed) and return
        the real filesystem path; eager resources are extracted together."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Directories are extracted recursively, one child at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last)  # return the extracted directory name

        # Decode the DOS date/time fields from the zip directory entry so
        # the extracted file can carry the archive's timestamp.
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            if not WRITE_SUPPORT:
                raise IOError('"os.rename" and "os.unlink" are not supported '
                              'on this platform')

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            # Write to a temp name in the same directory, then rename into
            # place, so concurrent extractors never see a partial file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt':     # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error()  # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        # Eager resources (native libs etc.) must all be on the filesystem
        # together; their names come from two optional metadata files.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        """Return (building lazily) a dict mapping each directory subpath in
        the archive to the list of its immediate children."""
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        # A name exists if it is either a file entry or an indexed directory.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1483
1484
register_loader_type(zipimport.zipimporter, ZipProvider)
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        # PKG-INFO is the only metadata this provider knows about.
        return name == 'PKG-INFO'

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        handle = open(self.path, 'rU')
        contents = handle.read()
        handle.close()
        return contents

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the importable base directory; `egg_info` points at the
        # .egg-info / EGG-INFO directory holding the metadata files.
        self.egg_info = egg_info
        self.module_path = path
1576
1577
1578
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        archive = importer.archive
        # Reuse zipimport's directory cache rather than reopening the zip.
        self.zipinfo = zipimport._zip_directory_cache[archive]
        self.zip_pre = archive + os.sep
        self.loader = importer
        if importer.prefix:
            self.module_path = os.path.join(archive, importer.prefix)
        else:
            self.module_path = archive
        self._setup_prefix()
1592
1593
1594
class ImpWrapper:
    """PEP 302 Importer that wraps Python's "normal" import algorithm"""

    def __init__(self, path=None):
        self.path = path

    def find_module(self, fullname, path=None):
        """Return an ``ImpLoader`` for `fullname`, or ``None`` if not found."""
        subname = fullname.split(".")[-1]
        # A dotted name can only be resolved by a path-specific wrapper.
        if subname != fullname and self.path is None:
            return None
        search_path = [self.path] if self.path is not None else None
        try:
            handle, pathname, description = imp.find_module(subname, search_path)
        except ImportError:
            return None
        return ImpLoader(handle, pathname, description)
1613
1614
1615
class ImpLoader:
    """PEP 302 Loader that wraps Python's "normal" import algorithm"""

    def __init__(self, file, filename, etc):
        self.file = file
        self.filename = filename
        self.etc = etc

    def load_module(self, fullname):
        """Import `fullname` via ``imp``, closing the source file afterwards."""
        try:
            module = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            if self.file:
                self.file.close()
        # __loader__ is deliberately left unset so the module looks "normal";
        # this class is just a wrapper for standard import machinery.
        return module
1631
1632
1633
1634
1635
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery.  The returned importer is only cached if it was created by a
    path hook.
    """
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        # Cache miss: ask each registered path hook in turn.
        importer = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                continue
            break

    # setdefault never overwrites an existing entry, so only hook-created
    # importers (or None) end up cached -- never the ImpWrapper fallback.
    sys.path_importer_cache.setdefault(path_item, importer)

    if importer is None:
        try:
            importer = ImpWrapper(path_item)
        except ImportError:
            pass
    return importer
1662
1663
try:
1664
    from pkgutil import get_importer, ImpImporter
1665
except ImportError:
1666
    pass    # Python 2.3 or 2.4, use our own implementation
1667
else:
1668
    ImpWrapper = ImpImporter    # Python 2.5, use pkgutil's implementation
1669
    del ImpLoader, ImpImporter
1670
1671
1672
1673
1674
1675
1676
_declare_state('dict', _distribution_finders = {})
1677
1678
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler).  `distribution_finder` is a callable that receives a path
    item together with its importer instance and yields the ``Distribution``
    objects found on that path item; ``pkg_resources.find_on_path`` is an
    example of such a finder.
    """
    _distribution_finders[importer_type] = distribution_finder
1686
1687
1688
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`

    Looks up the path item's importer, picks the registered finder that
    matches it, and delegates to that finder.
    """
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1693
1694
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found inside a zipped egg, including nested eggs."""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # Caller wants just the top-level distro; skip nested eggs.
        return
    for subitem in metadata.resource_listdir('/'):
        if not subitem.endswith('.egg'):
            continue
        subpath = os.path.join(path_item, subitem)
        for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
            yield dist
1705
1706
register_finder(zipimport.zipimporter, find_in_zip)
1707
1708
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand

    On first call, rebinds the module-level ``StringIO`` name to the best
    available implementation and delegates to it; subsequent calls go
    straight to the real class.  Generalized to fall back to ``io.StringIO``
    when neither ``cStringIO`` nor the ``StringIO`` module exists (i.e. on
    Python 3); on Python 2 the original ``cStringIO``-first behavior is
    unchanged.
    """
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        try:
            from StringIO import StringIO
        except ImportError:
            from io import StringIO
    return StringIO(*args, **kw)
1716
1717
def find_nothing(importer, path_item, only=False):
    """Default finder: objects of unknown type never contain distributions."""
    return ()
1719
register_finder(object,find_nothing)
1720
1721
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    # Only readable directories can be scanned.
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # standalone PKG-INFO-style file
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # nested egg (file or directory): recurse via the
                    # registered finders
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link file: its first non-blank line names the
                    # location of the actual development egg
                    for line in open(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
                            yield item
                        break
1756
register_finder(ImpWrapper,find_on_path)
1757
1758
_declare_state('dict', _namespace_handlers={})
1759
_declare_state('dict', _namespace_packages={})
1760
1761
1762
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # A later registration for the same importer type replaces the earlier one.
    _namespace_handlers[importer_type] = namespace_handler
1778
1779
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        # no importer claims this path item
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # importer can't supply this package from path_item
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # first sighting: create the namespace package's module object
        # and hook it onto its parent package
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # let the loader (re)initialize the module, then restore the
        # accumulated __path__, since load_module may have replaced it
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
1799
1800
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # The import lock serializes mutation of _namespace_packages and
    # sys.modules while we wire up the namespace.
    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # already declared; nothing to do
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # recursively declare the parent namespace first, and make
            # sure the parent is actually imported
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                # child search path is the parent's __path__
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()
1831
1832
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # Hold the import lock while namespace bookkeeping is updated.
    imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # the new subpath may itself contain nested namespace
                # packages, so recurse one level down
                fixup_namespace_packages(subpath, package)
    finally:
        imp.release_lock()
1841
1842
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    # candidate directory for this package under path_item
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    # compare against the normalized form of every existing __path__ entry
    for existing in module.__path__:
        if _normalize_cached(existing) == normalized:
            # an equivalent entry is already present; contribute nothing
            return None
    # Only return the path if it's not already there
    return subpath
1853
1854
register_namespace_handler(ImpWrapper,file_ns_handler)
1855
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1856
1857
1858
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback handler: unknown importer types contribute no subpath."""
    return None
1860
1861
register_namespace_handler(object,null_ns_handler)
1862
1863
1864
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # resolve symlinks first, then fold case (a no-op except on
    # case-insensitive filesystems)
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
1867
1868
def _normalize_cached(filename, _cache={}):
    # NOTE: the mutable default is deliberate -- it is a process-wide
    # memo of normalize_path() results.
    try:
        return _cache[filename]
    except KeyError:
        normalized = _cache[filename] = normalize_path(filename)
        return normalized
1874
1875
def _set_parent_ns(packageName):
1876
    parts = packageName.split('.')
1877
    name = parts.pop()
1878
    if parts:
1879
        parent = '.'.join(parts)
1880
        setattr(sys.modules[parent], name, sys.modules[packageName])
1881
1882
1883
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs, basestring):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines and comment-only lines
            if line and not line.startswith('#'):
                yield line
    else:
        # nested iterable: flatten recursively
        for element in strs:
            for line in yield_lines(element):
                yield line
1894
1895
# Tokenizer primitives for requirement strings; each is a bound .match
# so it can be called as NAME(line, pos).
LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
COMMA    = re.compile(r"\s*,").match               # comma between items
OBRACKET = re.compile(r"\s*\[").match              # start of [extras] list
CBRACKET = re.compile(r"\s*\]").match              # end of [extras] list
MODULE   = re.compile(r"\w+(\.\w+)*$").match       # dotted module/group name
EGG_NAME = re.compile(
    # name-version-pyX.Y-platform; every segment after `name` is optional
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match
1908
1909
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
1910
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
1911
1912
def _parse_version_parts(s):
    """Yield normalized, sortable tokens for one version string."""
    for token in component_re.split(s):
        # canonicalize known tags ('rc' -> 'c', 'dev' -> '@', ...)
        token = replace(token, token)
        if not token or token == '.':
            continue
        if token[:1] in '0123456789':
            # pad for numeric comparison
            yield token.zfill(8)
        else:
            yield '*' + token

    yield '*final'  # ensure that alpha/beta/candidate are before final
1923
1924
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower
    than any other pre-release tag.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final':   # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
1965
1966
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # validate/normalize extras by round-tripping through a dummy
        # requirement string
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # inverse of parse(): "name = module:attr.attr [extra1,extra2]"
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        # Import the advertised module and walk the attribute chain.
        # `require=True` first resolves/activates the dist's dependencies.
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # NOTE: relies on Python 2's eager map() for the side effect of
        # adding each resolved dist to the working set
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    #@classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                # split off and validate the "[extras]" suffix
                value,extras = value.split('[',1)
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                # split off and validate the ":attr.attr" chain
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    #@classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    parse_group = classmethod(parse_group)

    #@classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # content before the first [section] header is only legal
                # if it is empty
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)
2083
2084
2085
def _remove_md5_fragment(location):
2086
    if not location:
2087
        return ''
2088
    parsed = urlparse(location)
2089
    if parsed[-1].startswith('md5='):
2090
        return urlunparse(parsed[:-1] + ('',))
2091
    return location
2092
2093
2094
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    def __init__(self,
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # when no version is given, the `version` property lazily
            # reads it from PKG-INFO instead
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    #@classmethod
    def from_location(cls,location,basename,metadata=None,**kw):
        # Derive name/version/pyver/platform from an egg-style basename.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    from_location = classmethod(from_location)

    # Composite sort/equality key; also the basis of __hash__ and all
    # rich comparisons below.
    hashcmp = property(
        lambda self: (
            getattr(self,'parsed_version',()),
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version,
            self.platform
        )
    )
    def __hash__(self): return hash(self.hashcmp)
    def __lt__(self, other):
        return self.hashcmp < other.hashcmp
    def __le__(self, other):
        return self.hashcmp <= other.hashcmp
    def __gt__(self, other):
        return self.hashcmp > other.hashcmp
    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp
    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    #@property
    def key(self):
        try:
            return self._key
        except AttributeError:
            # cache the lowercased project name
            self._key = key = self.project_name.lower()
            return key
    key = property(key)

    #@property
    def parsed_version(self):
        try:
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)
            return pv

    parsed_version = property(parsed_version)

    #@property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # lazily read the version from the PKG-INFO metadata
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                # loop finished without finding a Version: header
                raise ValueError(
                    "Missing 'Version:' header and/or PKG-INFO file", self
                )
    version = property(version)

    #@property
    def _dep_map(self):
        # Lazily-built map of extra name (or None for the base deps) to a
        # list of parsed Requirements.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None,()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self,name):
        # Yield the lines of metadata file `name`, or nothing if absent.
        # (has_metadata/get_metadata_lines are delegated to the provider
        # via __getattr__.)
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            # NOTE: relies on Python 2's eager map() for its side effects
            map(declare_namespace, self._get_metadata('namespace_packages.txt'))

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-'+self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self,self.location)
        else:
            return str(self)

    def __str__(self):
        # tolerate distributions whose version can't be determined
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    #@classmethod
    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group,name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # parse entry_points.txt once and cache the result
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""

        loc = loc or self.location

        # distribute refuses to coexist with a 0.7-series setuptools
        if self.project_name == 'setuptools':
            try:
                version = self.version
            except ValueError:
                version = ''
            if '0.7' in version:
                raise ValueError(
                    "A 0.7-series setuptools cannot be installed "
                    "with distribute. Found one at %s" % str(self.location))

        if not loc:
            return

        if path is sys.path:
            self.check_version_conflict()

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # NOTE: relies on Python 2's eager map() returning a list, since
        # npath is indexed and mutated below
        npath= map(_normalize_cached, path)

        bp = None  # (unused)
        for p, item in enumerate(npath):
            if item==nloc:
                # already on the path; fall through to de-duplication
                break
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # not found and no parent dir seen: append at the end
            path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while 1:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                p = np  # ha!

        return

    def check_version_conflict(self):
        # Warn when a module provided by this dist was already imported
        # from somewhere else.
        if self.key=='distribute':
            return      # ignore the inevitable setuptools self-conflicts  :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            ):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                # already imported from this very distribution: fine
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        # True if a version can be determined; warns otherwise.
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for "+repr(self))
            return False
        return True

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        for attr in (
            'project_name', 'version', 'py_version', 'platform', 'location',
            'precedence'
        ):
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    #@property
    def extras(self):
        # all extras declared in the dependency map (None is the base set)
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
2416
2417
2418
def issue_warning(*args,**kw):
2419
    level = 1
2420
    g = globals()
2421
    try:
2422
        # find the first stack frame that is *not* code in
2423
        # the pkg_resources module, to use for the warning
2424
        while sys._getframe(level).f_globals is g:
2425
            level += 1
2426
    except ValueError:
2427
        pass
2428
    from warnings import warn
2429
    warn(stacklevel = level+1, *args, **kw)
2430
2431
2432
2433
2434
2435
2436
2437
2438
2439
2440
2441
2442
2443
2444
2445
2446
2447
2448
2449
2450
2451
2452
2453
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.

    Raises ValueError on malformed input (missing distribution name,
    bad extras list, bad version specifier, or a trailing ``\\``).
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        """Scan a comma-separated list of ITEM matches starting at
        position `p` of `line`, stopping at a TERMINATOR match.

        Returns (current line, new position, captured items); `groups`
        selects which regex groups of each ITEM match are captured.
        """
        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                try:
                    # backslash continuation: resume at the next line
                    # (fix: use the next() builtin instead of the
                    # Python-2-only .next() method)
                    line = next(lines); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end()   # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        # optional "[extra1,extra2]" list after the project name
        match = OBRACKET(line,p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        # version specifiers such as ">=1.0,<2.0" up to end of line
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
2512
2513
2514
def _sort_dists(dists):
2515
    tmp = [(dist.hashcmp,dist) for dist in dists]
2516
    tmp.sort()
2517
    dists[::-1] = [d for hc,d in tmp]
2518
2519
2520
2521
2522
2523
2524
2525
2526
2527
2528
2529
2530
2531
2532
2533
2534
2535
class Requirement:
    """A parsed requirement such as ``Thing>=1.2,<2.0 [extra1,extra2]``."""

    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        # decorate each spec with its parsed version and its state-machine
        # row, then sort so __contains__ evaluates versions in order
        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
        index.sort()
        self.specs = [(op,ver) for parsed,trans,op,ver in index]
        self.index, self.extras = index, tuple(map(safe_extra,extras))
        self.hashCmp = (
            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
            frozenset(self.extras)
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        # inverse of parse(): "name[extras]spec,spec"
        specs = ','.join([''.join(s) for s in self.specs])
        extras = ','.join(self.extras)
        if extras: extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, specs)

    def __eq__(self,other):
        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp

    def __contains__(self,item):
        """True if `item` (Distribution or version string) satisfies us."""
        if isinstance(item,Distribution):
            # fix: use '!=' instead of the deprecated Python-2-only '<>'
            if item.key != self.key: return False
            if self.index: item = item.parsed_version  # only get if we need it
        elif isinstance(item,basestring):
            item = parse_version(item)
        last = None
        compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
        for parsed,trans,op,ver in self.index:
            # trans is a 3-char action row indexed by the comparison
            # result: [0]=equal, [1]=greater, [-1]=less
            action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
            if action=='F':     return False
            elif action=='T':   return True
            elif action=='+':   last = True
            elif action=='-' or last is None:   last = False
        if last is None: last = True    # no rules encountered
        return last

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    #@staticmethod
    def parse(s, replacement=True):
        """Parse exactly one requirement from string `s`.

        When `replacement` is true and `s` names a 0.6-series setuptools,
        a requirement for distribute is returned instead.
        """
        reqs = list(parse_requirements(s))
        if reqs:
            if len(reqs) == 1:
                founded_req = reqs[0]
                # if asked for setuptools distribution
                # and if distribute is installed, we want to give
                # distribute instead
                if _override_setuptools(founded_req) and replacement:
                    distribute = list(parse_requirements('distribute'))
                    if len(distribute) == 1:
                        return distribute[0]
                    return founded_req
                else:
                    return founded_req

            raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)

    parse = staticmethod(parse)
2603
2604
# Per-operator action rows for Requirement.__contains__.  Each value is a
# 3-char string indexed by the comparison result of (candidate, spec
# version): position 0 = equal, 1 = greater, -1 = less.  Actions:
# 'T' = definite match, 'F' = definite fail, '+' = provisional match,
# '-' = provisional fail, '.' = no change.
state_machine = {
    #       =><
    '<' :  '--T',
    '<=':  'T-T',
    '>' :  'F+F',
    '>=':  'T+F',
    '==':  'T..',
    '!=':  'F++',
}
2613
2614
2615
def _override_setuptools(req):
    """Return True when distribute wants to override a setuptools dependency.

    We want to override when the requirement is setuptools and the version is
    a variant of 0.6.
    """
    if req.project_name != 'setuptools':
        return False
    if not req.specs:
        # A bare "setuptools" requirement: always override.
        return True
    for comparator, version in req.specs:
        # A pin or lower bound inside the 0.7 series means the caller
        # really wants a newer setuptools, not the 0.6 fork.
        if comparator in ('==', '>=', '>') and '0.7' in version:
            return False
    return True
2633
2634
2635
def _get_mro(cls):
    """Get an mro for a type or classic class"""
    if isinstance(cls, type):
        return cls.__mro__
    # Old-style class: derive a throwaway new-style subclass, then strip
    # that wrapper back off the front of the computed mro.
    class cls(cls, object):
        pass
    return cls.__mro__[1:]
2641
2642
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`

    Walks `ob`'s mro (falling back to ``type(ob)`` for objects without a
    ``__class__``) and returns the first registered factory; returns None
    implicitly when nothing matches.
    """
    mro = _get_mro(getattr(ob, '__class__', type(ob)))
    for candidate in mro:
        if candidate in registry:
            return registry[candidate]
2647
2648
2649
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists

    Creates any missing intermediate directories.  The makedirs call is
    wrapped EAFP-style so two processes racing to create the same tree do
    not fail spuriously: the old isdir-then-makedirs sequence raised
    OSError if another process created the directory in between.
    """
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Re-raise unless the directory now exists (i.e. it was
            # created concurrently); any other failure is still an error.
            if not os.path.isdir(dirname):
                raise
2654
2655
def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_lines = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_lines.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # New header: flush the previous segment, but skip the initial
        # segment when it is completely empty.
        if current_section or current_lines:
            yield current_section, current_lines
        current_section = line[1:-1].strip()
        current_lines = []
    # wrap up last segment
    yield current_section, current_lines
2679
2680
def _mkstemp(*args, **kw):
    """mkstemp() that temporarily restores the real ``os.open``.

    The sandbox replaces ``os.open``; swap the captured original back in
    for the duration of the tempfile call, then restore whatever was there.
    """
    from tempfile import mkstemp
    saved_open = os.open
    os.open = os_open   # temporarily bypass sandboxing
    try:
        return mkstemp(*args, **kw)
    finally:
        os.open = saved_open  # and then put it back
2688
2689
2690
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()

def _initialize(g):
    # Publish every public ResourceManager attribute into the mapping `g`
    # (normally this module's globals) as a module-level convenience API.
    public_names = (n for n in dir(_manager) if not n.startswith('_'))
    for n in public_names:
        g[n] = getattr(_manager, n)

_initialize(globals())
2697
2698
# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())

try:
    # Does the main program list any requirements?
    from __main__ import __requires__
except ImportError:
    pass # No: just use the default working set based on sys.path
else:
    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
    try:
        working_set.require(__requires__)
    except VersionConflict:     # try it without defaults already on sys.path
        working_set = WorkingSet([])    # by starting with an empty path
        # Resolve __requires__ from scratch against a default Environment,
        # adding only the matching distributions to the fresh working set.
        for dist in working_set.resolve(
            parse_requirements(__requires__), Environment()
        ):
            working_set.add(dist)
        for entry in sys.path:  # add any missing entries from sys.path
            if entry not in working_set.entries:
                working_set.add_entry(entry)
        sys.path[:] = working_set.entries   # then copy back to sys.path

# Module-level aliases so callers can use e.g. ``pkg_resources.require()``
# without referencing the working set object directly.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script   # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order