Commit 977b8443a7e1664c6da583f0b33cc825a1cae434

Authored by Charles Oliveira
1 parent e0db3da9

Added function add_puppet_repository

Showing 2 changed files with 2699 additions and 41 deletions   Show diff stats
puppet/bootstrap.py
... ... @@ -2,7 +2,6 @@
2 2  
3 3 import os
4 4 import re
5   -import locale
6 5 import platform
7 6 import subprocess
8 7 import urllib
... ... @@ -11,15 +10,10 @@ from distutils.version import StrictVersion
11 10 from shutil import copyfile
12 11  
13 12 try:
14   - from pkg_resources import parse_requirements
  13 + from pkg_resources import parse_requirements, to_filename
15 14 except ImportError:
16   - # Needed dependency for import pkg_resources
17   - pkg_res = ['apt-get', 'install', 'python-pkg-resources', '-y']
18   - subprocess.call(pkg_res)
19   - from pkg_resources import parse_requirements
20   -finally:
21   - from pkg_resources import to_filename
22   -
  15 + # Import local version if not installed
  16 + from pkg_resources_local import parse_requirements, to_filename
23 17  
24 18 PUPPET_TARGET_VERSION = "3.6.2"
25 19 PUPPET_DIR = os.path.join(os.path.dirname(__file__))
... ... @@ -31,15 +25,38 @@ DIST_CMD = {
31 25 {'pkg_flags': '-y'},
32 26 {'rep_manager': 'dpkg'},
33 27 {'rep_flags': '-i'},
  28 + {'puppet_repo': 'https://apt.puppetlabs.com/'},
  29 + {'puppet_pkg': 'puppetlabs-release-%s.deb'},
34 30 ],
35 31 'centos': [
36 32 {'pkg_manager': 'yum'},
37 33 {'pkg_flags': '-y'},
38   - {'pkg_manager': 'yum'},
  34 + {'rep_manager': 'rpm'},
39 35 {'rep_flags': '-ivh'},
  36 + {'puppet_repo': 'http://yum.puppetlabs.com/'},
  37 + {'puppet_pkg': 'puppetlabs-release-el-%s.noarch.rpm'},
40 38 ],
41 39 }
42 40  
  41 +
  42 +def add_puppet_repository():
  43 + distro, release = get_release_name()
  44 + cmd_dict = DIST_CMD[distro]
  45 + rep_manager = cmd_dict['rep_manager']
  46 + flags = cmd_dict['rep_flags']
  47 + puppet_repo = cmd_dict['puppet_repo']
  48 + puppet_pkg = cmd_dict['puppet_pkg'] % (release)
  49 +
  50 + # Download repository file
  51 + tmp_file = '/tmp/%s' % (puppet_pkg)
  52 + download(puppet_repo + puppet_pkg, tmp_file)
  53 +
  54 + # Add repository
  55 + cmd = [rep_manager, flags, tmp_file]
  56 + if subprocess.call(cmd) != 0:
  57 + print('Repository %s already set' % puppet_pkg)
  58 +
  59 +
43 60 def package_install(package):
44 61 distro, release = get_release_name()
45 62 cmd_dict = DIST_CMD[distro]
... ... @@ -53,8 +70,8 @@ def distro_update():
53 70 distro, release = get_release_name()
54 71 cmd_dict = DIST_CMD[distro]
55 72 pkg_manager = cmd_dict['pkg_manager']
56   - flags = cmd_dict['flags']
57   - cmd = [pkg_manager, 'update', flags]
  73 + flags = cmd_dict['pkg_flags']
  74 + cmd = [pkg_manager, flags, 'update']
58 75 return subprocess.call(cmd)
59 76  
60 77  
... ... @@ -157,38 +174,14 @@ def main():
157 174 distro, release = get_release_name()
158 175 print('Distro %s, release %s' % (distro, release))
159 176  
160   - if iscentos(distro):
161   - cmd = 'rpm'
162   - flags = '-ivh'
163   - url = 'http://yum.puppetlabs.com/'
164   - pkg = 'puppetlabs-release-el-%s.noarch.rpm' % (release)
165   - update = ['yum', 'update', '-y']
166   - install = ['yum', 'install', 'puppet', '-y']
167   - elif isubuntu(distro):
168   - cmd = 'dpkg'
169   - flags = '-i'
170   - url = 'https://apt.puppetlabs.com/'
171   - pkg = 'puppetlabs-release-%s.deb' % (release)
172   - update = ['apt-get', 'update', '-y']
173   - install = ['apt-get', 'install', 'puppet', '-y']
174   -
175   - else:
176   - print('This distribuition is currently not supported!')
177   - print('exiting...')
178   - exit(1)
179   -
180   - tmp_file = '/tmp/%s' % (pkg)
181   - download(url + pkg, tmp_file)
182   - args = [cmd, flags, tmp_file]
183   -
184 177 # Add repository
185   - result = subprocess.call(args)
186   - if result != 0:
187   - print('Repository %s already set' % pkg)
  178 + add_puppet_repository()
188 179  
189 180 # Install Puppet
190   - subprocess.call(update)
191   - result = subprocess.call(install)
  181 + if isubuntu(distro):
  182 + distro_update()
  183 +
  184 + result = package_install('puppet')
192 185 if result != 0:
193 186 print('Failed installing puppet')
194 187 exit(result)
... ...
puppet/pkg_resources_local.py 0 → 100644
... ... @@ -0,0 +1,2665 @@
  1 +"""Package resource API
  2 +--------------------
  3 +
  4 +A resource is a logical file contained within a package, or a logical
  5 +subdirectory thereof. The package resource API expects resource names
  6 +to have their path parts separated with ``/``, *not* whatever the local
  7 +path separator is. Do not use os.path operations to manipulate resource
  8 +names being passed into the API.
  9 +
  10 +The package resource API is designed to work with normal filesystem packages,
  11 +.egg files, and unpacked .egg files. It can also work in a limited way with
  12 +.zip files and with custom PEP 302 loaders that support the ``get_data()``
  13 +method.
  14 +"""
  15 +
  16 +import sys, os, zipimport, time, re, imp, pkgutil # XXX
  17 +
  18 +try:
  19 + frozenset
  20 +except NameError:
  21 + from sets import ImmutableSet as frozenset
  22 +
  23 +# capture these to bypass sandboxing
  24 +from os import utime, rename, unlink, mkdir
  25 +from os import open as os_open
  26 +from os.path import isdir, split
  27 +
  28 +def _bypass_ensure_directory(name, mode=0777):
  29 + # Sandbox-bypassing version of ensure_directory()
  30 + dirname, filename = split(name)
  31 + if dirname and filename and not isdir(dirname):
  32 + _bypass_ensure_directory(dirname)
  33 + mkdir(dirname, mode)
  34 +
  35 +
  36 +
  37 +
  38 +
  39 +
  40 +
  41 +
  42 +_state_vars = {}
  43 +
  44 +def _declare_state(vartype, **kw):
  45 + g = globals()
  46 + for name, val in kw.iteritems():
  47 + g[name] = val
  48 + _state_vars[name] = vartype
  49 +
  50 +def __getstate__():
  51 + state = {}
  52 + g = globals()
  53 + for k, v in _state_vars.iteritems():
  54 + state[k] = g['_sget_'+v](g[k])
  55 + return state
  56 +
  57 +def __setstate__(state):
  58 + g = globals()
  59 + for k, v in state.iteritems():
  60 + g['_sset_'+_state_vars[k]](k, g[k], v)
  61 + return state
  62 +
  63 +def _sget_dict(val):
  64 + return val.copy()
  65 +
  66 +def _sset_dict(key, ob, state):
  67 + ob.clear()
  68 + ob.update(state)
  69 +
  70 +def _sget_object(val):
  71 + return val.__getstate__()
  72 +
  73 +def _sset_object(key, ob, state):
  74 + ob.__setstate__(state)
  75 +
  76 +_sget_none = _sset_none = lambda *args: None
  77 +
  78 +
  79 +
  80 +
  81 +
  82 +
  83 +def get_supported_platform():
  84 + """Return this platform's maximum compatible version.
  85 +
  86 + distutils.util.get_platform() normally reports the minimum version
  87 + of Mac OS X that would be required to *use* extensions produced by
  88 + distutils. But what we want when checking compatibility is to know the
  89 + version of Mac OS X that we are *running*. To allow usage of packages that
  90 + explicitly require a newer version of Mac OS X, we must also know the
  91 + current version of the OS.
  92 +
  93 + If this condition occurs for any other platform with a version in its
  94 + platform strings, this function should be extended accordingly.
  95 + """
  96 + plat = get_build_platform(); m = macosVersionString.match(plat)
  97 + if m is not None and sys.platform == "darwin":
  98 + try:
  99 + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
  100 + except ValueError:
  101 + pass # not Mac OS X
  102 + return plat
  103 +
  104 +
  105 +
  106 +
  107 +
  108 +
  109 +
  110 +
  111 +
  112 +
  113 +
  114 +
  115 +
  116 +
  117 +
  118 +
  119 +
  120 +
  121 +
  122 +
  123 +
  124 +__all__ = [
  125 + # Basic resource access and distribution/entry point discovery
  126 + 'require', 'run_script', 'get_provider', 'get_distribution',
  127 + 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
  128 + 'resource_string', 'resource_stream', 'resource_filename',
  129 + 'resource_listdir', 'resource_exists', 'resource_isdir',
  130 +
  131 + # Environmental control
  132 + 'declare_namespace', 'working_set', 'add_activation_listener',
  133 + 'find_distributions', 'set_extraction_path', 'cleanup_resources',
  134 + 'get_default_cache',
  135 +
  136 + # Primary implementation classes
  137 + 'Environment', 'WorkingSet', 'ResourceManager',
  138 + 'Distribution', 'Requirement', 'EntryPoint',
  139 +
  140 + # Exceptions
  141 + 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
  142 + 'ExtractionError',
  143 +
  144 + # Parsing functions and string utilities
  145 + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
  146 + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
  147 + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
  148 +
  149 + # filesystem utilities
  150 + 'ensure_directory', 'normalize_path',
  151 +
  152 + # Distribution "precedence" constants
  153 + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
  154 +
  155 + # "Provider" interfaces, implementations, and registration/lookup APIs
  156 + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
  157 + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
  158 + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
  159 + 'register_finder', 'register_namespace_handler', 'register_loader_type',
  160 + 'fixup_namespace_packages', 'get_importer',
  161 +
  162 + # Deprecated/backward compatibility only
  163 + 'run_main', 'AvailableDistributions',
  164 +]
  165 +class ResolutionError(Exception):
  166 + """Abstract base for dependency resolution errors"""
  167 + def __repr__(self):
  168 + return self.__class__.__name__+repr(self.args)
  169 +
  170 +class VersionConflict(ResolutionError):
  171 + """An already-installed version conflicts with the requested version"""
  172 +
  173 +class DistributionNotFound(ResolutionError):
  174 + """A requested distribution was not found"""
  175 +
  176 +class UnknownExtra(ResolutionError):
  177 + """Distribution doesn't have an "extra feature" of the given name"""
  178 +
  179 +_provider_factories = {}
  180 +PY_MAJOR = sys.version[:3]
  181 +EGG_DIST = 3
  182 +BINARY_DIST = 2
  183 +SOURCE_DIST = 1
  184 +CHECKOUT_DIST = 0
  185 +DEVELOP_DIST = -1
  186 +
  187 +def register_loader_type(loader_type, provider_factory):
  188 + """Register `provider_factory` to make providers for `loader_type`
  189 +
  190 + `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
  191 + and `provider_factory` is a function that, passed a *module* object,
  192 + returns an ``IResourceProvider`` for that module.
  193 + """
  194 + _provider_factories[loader_type] = provider_factory
  195 +
  196 +def get_provider(moduleOrReq):
  197 + """Return an IResourceProvider for the named module or requirement"""
  198 + if isinstance(moduleOrReq,Requirement):
  199 + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
  200 + try:
  201 + module = sys.modules[moduleOrReq]
  202 + except KeyError:
  203 + __import__(moduleOrReq)
  204 + module = sys.modules[moduleOrReq]
  205 + loader = getattr(module, '__loader__', None)
  206 + return _find_adapter(_provider_factories, loader)(module)
  207 +
  208 +def _macosx_vers(_cache=[]):
  209 + if not _cache:
  210 + from platform import mac_ver
  211 + _cache.append(mac_ver()[0].split('.'))
  212 + return _cache[0]
  213 +
  214 +def _macosx_arch(machine):
  215 + return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
  216 +
  217 +def get_build_platform():
  218 + """Return this platform's string for platform-specific distributions
  219 +
  220 + XXX Currently this is the same as ``distutils.util.get_platform()``, but it
  221 + needs some hacks for Linux and Mac OS X.
  222 + """
  223 + from distutils.util import get_platform
  224 + plat = get_platform()
  225 + if sys.platform == "darwin" and not plat.startswith('macosx-'):
  226 + try:
  227 + version = _macosx_vers()
  228 + machine = os.uname()[4].replace(" ", "_")
  229 + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
  230 + _macosx_arch(machine))
  231 + except ValueError:
  232 + # if someone is running a non-Mac darwin system, this will fall
  233 + # through to the default implementation
  234 + pass
  235 + return plat
  236 +
  237 +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
  238 +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
  239 +get_platform = get_build_platform # XXX backward compat
  240 +
  241 +
  242 +
  243 +
  244 +
  245 +
  246 +
  247 +def compatible_platforms(provided,required):
  248 + """Can code for the `provided` platform run on the `required` platform?
  249 +
  250 + Returns true if either platform is ``None``, or the platforms are equal.
  251 +
  252 + XXX Needs compatibility checks for Linux and other unixy OSes.
  253 + """
  254 + if provided is None or required is None or provided==required:
  255 + return True # easy case
  256 +
  257 + # Mac OS X special cases
  258 + reqMac = macosVersionString.match(required)
  259 + if reqMac:
  260 + provMac = macosVersionString.match(provided)
  261 +
  262 + # is this a Mac package?
  263 + if not provMac:
  264 + # this is backwards compatibility for packages built before
  265 + # setuptools 0.6. All packages built after this point will
  266 + # use the new macosx designation.
  267 + provDarwin = darwinVersionString.match(provided)
  268 + if provDarwin:
  269 + dversion = int(provDarwin.group(1))
  270 + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
  271 + if dversion == 7 and macosversion >= "10.3" or \
  272 + dversion == 8 and macosversion >= "10.4":
  273 +
  274 + #import warnings
  275 + #warnings.warn("Mac eggs should be rebuilt to "
  276 + # "use the macosx designation instead of darwin.",
  277 + # category=DeprecationWarning)
  278 + return True
  279 + return False # egg isn't macosx or legacy darwin
  280 +
  281 + # are they the same major version and machine type?
  282 + if provMac.group(1) != reqMac.group(1) or \
  283 + provMac.group(3) != reqMac.group(3):
  284 + return False
  285 +
  286 +
  287 +
  288 + # is the required OS major update >= the provided one?
  289 + if int(provMac.group(2)) > int(reqMac.group(2)):
  290 + return False
  291 +
  292 + return True
  293 +
  294 + # XXX Linux and other platforms' special cases should go here
  295 + return False
  296 +
  297 +
  298 +def run_script(dist_spec, script_name):
  299 + """Locate distribution `dist_spec` and run its `script_name` script"""
  300 + ns = sys._getframe(1).f_globals
  301 + name = ns['__name__']
  302 + ns.clear()
  303 + ns['__name__'] = name
  304 + require(dist_spec)[0].run_script(script_name, ns)
  305 +
  306 +run_main = run_script # backward compatibility
  307 +
  308 +def get_distribution(dist):
  309 + """Return a current distribution object for a Requirement or string"""
  310 + if isinstance(dist,basestring): dist = Requirement.parse(dist)
  311 + if isinstance(dist,Requirement): dist = get_provider(dist)
  312 + if not isinstance(dist,Distribution):
  313 + raise TypeError("Expected string, Requirement, or Distribution", dist)
  314 + return dist
  315 +
  316 +def load_entry_point(dist, group, name):
  317 + """Return `name` entry point of `group` for `dist` or raise ImportError"""
  318 + return get_distribution(dist).load_entry_point(group, name)
  319 +
  320 +def get_entry_map(dist, group=None):
  321 + """Return the entry point map for `group`, or the full entry map"""
  322 + return get_distribution(dist).get_entry_map(group)
  323 +
  324 +def get_entry_info(dist, group, name):
  325 + """Return the EntryPoint object for `group`+`name`, or ``None``"""
  326 + return get_distribution(dist).get_entry_info(group, name)
  327 +
  328 +
  329 +try:
  330 + from pkgutil import get_importer
  331 +except ImportError:
  332 + import _pkgutil as pkgutil
  333 + get_importer = pkgutil.get_importer
  334 +else:
  335 + import pkgutil
  336 +
  337 +
  338 +class IMetadataProvider:
  339 +
  340 + def has_metadata(name):
  341 + """Does the package's distribution contain the named metadata?"""
  342 +
  343 + def get_metadata(name):
  344 + """The named metadata resource as a string"""
  345 +
  346 + def get_metadata_lines(name):
  347 + """Yield named metadata resource as list of non-blank non-comment lines
  348 +
  349 + Leading and trailing whitespace is stripped from each line, and lines
  350 + with ``#`` as the first non-blank character are omitted."""
  351 +
  352 + def metadata_isdir(name):
  353 + """Is the named metadata a directory? (like ``os.path.isdir()``)"""
  354 +
  355 + def metadata_listdir(name):
  356 + """List of metadata names in the directory (like ``os.listdir()``)"""
  357 +
  358 + def run_script(script_name, namespace):
  359 + """Execute the named script in the supplied namespace dictionary"""
  360 +
  361 +
  362 +
  363 +
  364 +
  365 +
  366 +
  367 +
  368 +
  369 +
  370 +class IResourceProvider(IMetadataProvider):
  371 + """An object that provides access to package resources"""
  372 +
  373 + def get_resource_filename(manager, resource_name):
  374 + """Return a true filesystem path for `resource_name`
  375 +
  376 + `manager` must be an ``IResourceManager``"""
  377 +
  378 + def get_resource_stream(manager, resource_name):
  379 + """Return a readable file-like object for `resource_name`
  380 +
  381 + `manager` must be an ``IResourceManager``"""
  382 +
  383 + def get_resource_string(manager, resource_name):
  384 + """Return a string containing the contents of `resource_name`
  385 +
  386 + `manager` must be an ``IResourceManager``"""
  387 +
  388 + def has_resource(resource_name):
  389 + """Does the package contain the named resource?"""
  390 +
  391 + def resource_isdir(resource_name):
  392 + """Is the named resource a directory? (like ``os.path.isdir()``)"""
  393 +
  394 + def resource_listdir(resource_name):
  395 + """List of resource names in the directory (like ``os.listdir()``)"""
  396 +
  397 +
  398 +
  399 +
  400 +
  401 +
  402 +
  403 +
  404 +
  405 +
  406 +
  407 +
  408 +
  409 +
  410 +
  411 +class WorkingSet(object):
  412 + """A collection of active distributions on sys.path (or a similar list)"""
  413 +
  414 + def __init__(self, entries=None):
  415 + """Create working set from list of path entries (default=sys.path)"""
  416 + self.entries = []
  417 + self.entry_keys = {}
  418 + self.by_key = {}
  419 + self.callbacks = []
  420 +
  421 + if entries is None:
  422 + entries = sys.path
  423 +
  424 + for entry in entries:
  425 + self.add_entry(entry)
  426 +
  427 +
  428 + def add_entry(self, entry):
  429 + """Add a path item to ``.entries``, finding any distributions on it
  430 +
  431 + ``find_distributions(entry, True)`` is used to find distributions
  432 + corresponding to the path entry, and they are added. `entry` is
  433 + always appended to ``.entries``, even if it is already present.
  434 + (This is because ``sys.path`` can contain the same value more than
  435 + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
  436 + equal ``sys.path``.)
  437 + """
  438 + self.entry_keys.setdefault(entry, [])
  439 + self.entries.append(entry)
  440 + for dist in find_distributions(entry, True):
  441 + self.add(dist, entry, False)
  442 +
  443 +
  444 + def __contains__(self,dist):
  445 + """True if `dist` is the active distribution for its project"""
  446 + return self.by_key.get(dist.key) == dist
  447 +
  448 +
  449 +
  450 +
  451 +
  452 + def find(self, req):
  453 + """Find a distribution matching requirement `req`
  454 +
  455 + If there is an active distribution for the requested project, this
  456 + returns it as long as it meets the version requirement specified by
  457 + `req`. But, if there is an active distribution for the project and it
  458 + does *not* meet the `req` requirement, ``VersionConflict`` is raised.
  459 + If there is no active distribution for the requested project, ``None``
  460 + is returned.
  461 + """
  462 + dist = self.by_key.get(req.key)
  463 + if dist is not None and dist not in req:
  464 + raise VersionConflict(dist,req) # XXX add more info
  465 + else:
  466 + return dist
  467 +
  468 + def iter_entry_points(self, group, name=None):
  469 + """Yield entry point objects from `group` matching `name`
  470 +
  471 + If `name` is None, yields all entry points in `group` from all
  472 + distributions in the working set, otherwise only ones matching
  473 + both `group` and `name` are yielded (in distribution order).
  474 + """
  475 + for dist in self:
  476 + entries = dist.get_entry_map(group)
  477 + if name is None:
  478 + for ep in entries.values():
  479 + yield ep
  480 + elif name in entries:
  481 + yield entries[name]
  482 +
  483 + def run_script(self, requires, script_name):
  484 + """Locate distribution for `requires` and run `script_name` script"""
  485 + ns = sys._getframe(1).f_globals
  486 + name = ns['__name__']
  487 + ns.clear()
  488 + ns['__name__'] = name
  489 + self.require(requires)[0].run_script(script_name, ns)
  490 +
  491 +
  492 +
  493 + def __iter__(self):
  494 + """Yield distributions for non-duplicate projects in the working set
  495 +
  496 + The yield order is the order in which the items' path entries were
  497 + added to the working set.
  498 + """
  499 + seen = {}
  500 + for item in self.entries:
  501 + for key in self.entry_keys[item]:
  502 + if key not in seen:
  503 + seen[key]=1
  504 + yield self.by_key[key]
  505 +
  506 + def add(self, dist, entry=None, insert=True):
  507 + """Add `dist` to working set, associated with `entry`
  508 +
  509 + If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
  510 + On exit from this routine, `entry` is added to the end of the working
  511 + set's ``.entries`` (if it wasn't already present).
  512 +
  513 + `dist` is only added to the working set if it's for a project that
  514 + doesn't already have a distribution in the set. If it's added, any
  515 + callbacks registered with the ``subscribe()`` method will be called.
  516 + """
  517 + if insert:
  518 + dist.insert_on(self.entries, entry)
  519 +
  520 + if entry is None:
  521 + entry = dist.location
  522 + keys = self.entry_keys.setdefault(entry,[])
  523 + keys2 = self.entry_keys.setdefault(dist.location,[])
  524 + if dist.key in self.by_key:
  525 + return # ignore hidden distros
  526 +
  527 + self.by_key[dist.key] = dist
  528 + if dist.key not in keys:
  529 + keys.append(dist.key)
  530 + if dist.key not in keys2:
  531 + keys2.append(dist.key)
  532 + self._added_new(dist)
  533 +
  534 + def resolve(self, requirements, env=None, installer=None):
  535 + """List all distributions needed to (recursively) meet `requirements`
  536 +
  537 + `requirements` must be a sequence of ``Requirement`` objects. `env`,
  538 + if supplied, should be an ``Environment`` instance. If
  539 + not supplied, it defaults to all distributions available within any
  540 + entry or distribution in the working set. `installer`, if supplied,
  541 + will be invoked with each requirement that cannot be met by an
  542 + already-installed distribution; it should return a ``Distribution`` or
  543 + ``None``.
  544 + """
  545 +
  546 + requirements = list(requirements)[::-1] # set up the stack
  547 + processed = {} # set of processed requirements
  548 + best = {} # key -> dist
  549 + to_activate = []
  550 +
  551 + while requirements:
  552 + req = requirements.pop(0) # process dependencies breadth-first
  553 + if req in processed:
  554 + # Ignore cyclic or redundant dependencies
  555 + continue
  556 + dist = best.get(req.key)
  557 + if dist is None:
  558 + # Find the best distribution and add it to the map
  559 + dist = self.by_key.get(req.key)
  560 + if dist is None:
  561 + if env is None:
  562 + env = Environment(self.entries)
  563 + dist = best[req.key] = env.best_match(req, self, installer)
  564 + if dist is None:
  565 + raise DistributionNotFound(req) # XXX put more info here
  566 + to_activate.append(dist)
  567 + if dist not in req:
  568 + # Oops, the "best" so far conflicts with a dependency
  569 + raise VersionConflict(dist,req) # XXX put more info here
  570 + requirements.extend(dist.requires(req.extras)[::-1])
  571 + processed[req] = True
  572 +
  573 + return to_activate # return list of distros to activate
  574 +
  575 + def find_plugins(self,
  576 + plugin_env, full_env=None, installer=None, fallback=True
  577 + ):
  578 + """Find all activatable distributions in `plugin_env`
  579 +
  580 + Example usage::
  581 +
  582 + distributions, errors = working_set.find_plugins(
  583 + Environment(plugin_dirlist)
  584 + )
  585 + map(working_set.add, distributions) # add plugins+libs to sys.path
  586 + print "Couldn't load", errors # display errors
  587 +
  588 + The `plugin_env` should be an ``Environment`` instance that contains
  589 + only distributions that are in the project's "plugin directory" or
  590 + directories. The `full_env`, if supplied, should be an ``Environment``
  591 + contains all currently-available distributions. If `full_env` is not
  592 + supplied, one is created automatically from the ``WorkingSet`` this
  593 + method is called on, which will typically mean that every directory on
  594 + ``sys.path`` will be scanned for distributions.
  595 +
  596 + `installer` is a standard installer callback as used by the
  597 + ``resolve()`` method. The `fallback` flag indicates whether we should
  598 + attempt to resolve older versions of a plugin if the newest version
  599 + cannot be resolved.
  600 +
  601 + This method returns a 2-tuple: (`distributions`, `error_info`), where
  602 + `distributions` is a list of the distributions found in `plugin_env`
  603 + that were loadable, along with any other distributions that are needed
  604 + to resolve their dependencies. `error_info` is a dictionary mapping
  605 + unloadable plugin distributions to an exception instance describing the
  606 + error that occurred. Usually this will be a ``DistributionNotFound`` or
  607 + ``VersionConflict`` instance.
  608 + """
  609 +
  610 + plugin_projects = list(plugin_env)
  611 + plugin_projects.sort() # scan project names in alphabetic order
  612 +
  613 + error_info = {}
  614 + distributions = {}
  615 +
  616 + if full_env is None:
  617 + env = Environment(self.entries)
  618 + env += plugin_env
  619 + else:
  620 + env = full_env + plugin_env
  621 +
  622 + shadow_set = self.__class__([])
  623 + map(shadow_set.add, self) # put all our entries in shadow_set
  624 +
  625 + for project_name in plugin_projects:
  626 +
  627 + for dist in plugin_env[project_name]:
  628 +
  629 + req = [dist.as_requirement()]
  630 +
  631 + try:
  632 + resolvees = shadow_set.resolve(req, env, installer)
  633 +
  634 + except ResolutionError,v:
  635 + error_info[dist] = v # save error info
  636 + if fallback:
  637 + continue # try the next older version of project
  638 + else:
  639 + break # give up on this project, keep going
  640 +
  641 + else:
  642 + map(shadow_set.add, resolvees)
  643 + distributions.update(dict.fromkeys(resolvees))
  644 +
  645 + # success, no need to try any more versions of this project
  646 + break
  647 +
  648 + distributions = list(distributions)
  649 + distributions.sort()
  650 +
  651 + return distributions, error_info
  652 +
  653 +
  654 +
  655 +
  656 +
  657 + def require(self, *requirements):
  658 + """Ensure that distributions matching `requirements` are activated
  659 +
  660 + `requirements` must be a string or a (possibly-nested) sequence
  661 + thereof, specifying the distributions and versions required. The
  662 + return value is a sequence of the distributions that needed to be
  663 + activated to fulfill the requirements; all relevant distributions are
  664 + included, even if they were already activated in this working set.
  665 + """
  666 + needed = self.resolve(parse_requirements(requirements))
  667 +
  668 + for dist in needed:
  669 + self.add(dist)
  670 +
  671 + return needed
  672 +
  673 + def subscribe(self, callback):
  674 + """Invoke `callback` for all distributions (including existing ones)"""
  675 + if callback in self.callbacks:
  676 + return
  677 + self.callbacks.append(callback)
  678 + for dist in self:
  679 + callback(dist)
  680 +
  681 + def _added_new(self, dist):
  682 + for callback in self.callbacks:
  683 + callback(dist)
  684 +
  685 + def __getstate__(self):
  686 + return (
  687 + self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
  688 + self.callbacks[:]
  689 + )
  690 +
  691 + def __setstate__(self, (entries, keys, by_key, callbacks)):
  692 + self.entries = entries[:]
  693 + self.entry_keys = keys.copy()
  694 + self.by_key = by_key.copy()
  695 + self.callbacks = callbacks[:]
  696 +
  697 +
  698 +class Environment(object):
  699 + """Searchable snapshot of distributions on a search path"""
  700 +
  701 + def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
  702 + """Snapshot distributions available on a search path
  703 +
  704 + Any distributions found on `search_path` are added to the environment.
  705 + `search_path` should be a sequence of ``sys.path`` items. If not
  706 + supplied, ``sys.path`` is used.
  707 +
  708 + `platform` is an optional string specifying the name of the platform
  709 + that platform-specific distributions must be compatible with. If
  710 + unspecified, it defaults to the current platform. `python` is an
  711 + optional string naming the desired version of Python (e.g. ``'2.4'``);
  712 + it defaults to the current version.
  713 +
  714 + You may explicitly set `platform` (and/or `python`) to ``None`` if you
  715 + wish to map *all* distributions, not just those compatible with the
  716 + running platform or Python version.
  717 + """
  718 + self._distmap = {}
  719 + self._cache = {}
  720 + self.platform = platform
  721 + self.python = python
  722 + self.scan(search_path)
  723 +
  724 + def can_add(self, dist):
  725 + """Is distribution `dist` acceptable for this environment?
  726 +
  727 + The distribution must match the platform and python version
  728 + requirements specified when this environment was created, or False
  729 + is returned.
  730 + """
  731 + return (self.python is None or dist.py_version is None
  732 + or dist.py_version==self.python) \
  733 + and compatible_platforms(dist.platform,self.platform)
  734 +
  735 + def remove(self, dist):
  736 + """Remove `dist` from the environment"""
  737 + self._distmap[dist.key].remove(dist)
  738 +
  739 + def scan(self, search_path=None):
  740 + """Scan `search_path` for distributions usable in this environment
  741 +
  742 + Any distributions found are added to the environment.
  743 + `search_path` should be a sequence of ``sys.path`` items. If not
  744 + supplied, ``sys.path`` is used. Only distributions conforming to
  745 + the platform/python version defined at initialization are added.
  746 + """
  747 + if search_path is None:
  748 + search_path = sys.path
  749 +
  750 + for item in search_path:
  751 + for dist in find_distributions(item):
  752 + self.add(dist)
  753 +
  754 + def __getitem__(self,project_name):
  755 + """Return a newest-to-oldest list of distributions for `project_name`
  756 + """
  757 + try:
  758 + return self._cache[project_name]
  759 + except KeyError:
  760 + project_name = project_name.lower()
  761 + if project_name not in self._distmap:
  762 + return []
  763 +
  764 + if project_name not in self._cache:
  765 + dists = self._cache[project_name] = self._distmap[project_name]
  766 + _sort_dists(dists)
  767 +
  768 + return self._cache[project_name]
  769 +
  770 + def add(self,dist):
  771 + """Add `dist` if we ``can_add()`` it and it isn't already added"""
  772 + if self.can_add(dist) and dist.has_version():
  773 + dists = self._distmap.setdefault(dist.key,[])
  774 + if dist not in dists:
  775 + dists.append(dist)
  776 + if dist.key in self._cache:
  777 + _sort_dists(self._cache[dist.key])
  778 +
  779 +
  780 + def best_match(self, req, working_set, installer=None):
  781 + """Find distribution best matching `req` and usable on `working_set`
  782 +
  783 + This calls the ``find(req)`` method of the `working_set` to see if a
  784 + suitable distribution is already active. (This may raise
  785 + ``VersionConflict`` if an unsuitable version of the project is already
  786 + active in the specified `working_set`.) If a suitable distribution
  787 + isn't active, this method returns the newest distribution in the
  788 + environment that meets the ``Requirement`` in `req`. If no suitable
  789 + distribution is found, and `installer` is supplied, then the result of
  790 + calling the environment's ``obtain(req, installer)`` method will be
  791 + returned.
  792 + """
  793 + dist = working_set.find(req)
  794 + if dist is not None:
  795 + return dist
  796 + for dist in self[req.key]:
  797 + if dist in req:
  798 + return dist
  799 + return self.obtain(req, installer) # try and download/install
  800 +
  801 + def obtain(self, requirement, installer=None):
  802 + """Obtain a distribution matching `requirement` (e.g. via download)
  803 +
  804 + Obtain a distro that matches requirement (e.g. via download). In the
  805 + base ``Environment`` class, this routine just returns
  806 + ``installer(requirement)``, unless `installer` is None, in which case
  807 + None is returned instead. This method is a hook that allows subclasses
  808 + to attempt other ways of obtaining a distribution before falling back
  809 + to the `installer` argument."""
  810 + if installer is not None:
  811 + return installer(requirement)
  812 +
  813 + def __iter__(self):
  814 + """Yield the unique project names of the available distributions"""
  815 + for key in self._distmap.keys():
  816 + if self[key]: yield key
  817 +
  818 +
  819 +
  820 +
  821 + def __iadd__(self, other):
  822 + """In-place addition of a distribution or environment"""
  823 + if isinstance(other,Distribution):
  824 + self.add(other)
  825 + elif isinstance(other,Environment):
  826 + for project in other:
  827 + for dist in other[project]:
  828 + self.add(dist)
  829 + else:
  830 + raise TypeError("Can't add %r to environment" % (other,))
  831 + return self
  832 +
  833 + def __add__(self, other):
  834 + """Add an environment or distribution to an environment"""
  835 + new = self.__class__([], platform=None, python=None)
  836 + for env in self, other:
  837 + new += env
  838 + return new
  839 +
  840 +
  841 +AvailableDistributions = Environment # XXX backward compatibility
  842 +
  843 +
class ExtractionError(RuntimeError):
    """Raised when resource files cannot be extracted to the egg cache.

    Instances expose three extra attributes:

    manager
        The ``ResourceManager`` whose extraction attempt failed.

    cache_path
        The base directory that extraction was targeting.

    original_error
        The underlying exception that caused the failure.
    """
  858 +
  859 +
  860 +
  861 +
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Maps every path handed out by get_cache_path() -> 1, so that
        # cleanup_resources() can find the files this manager created.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        # Wraps the exception currently being handled in an ExtractionError
        # carrying this manager, the cache path, and the original error,
        # then raises it.  Must be called from inside an except block.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory?  You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            # Deliberately broad: any failure creating the directory is
            # rerouted through extraction_error(), which re-raises as a
            # user-friendly ExtractionError.
            _bypass_ensure_directory(target_path)
        except:
            self.extraction_error()

        # Record the path so cleanup_resources() knows about it later.
        self.cached_files[target_path] = 1
        return target_path

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX unimplemented stub in this copy -- nothing is deleted and the
        # method implicitly returns None despite the docstring above.
  1065 +
  1066 +
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".

    Raises RuntimeError on Windows when none of the candidate environment
    variables yields a usable home directory.
    """
    cache = os.environ.get('PYTHON_EGG_CACHE')
    if cache is not None:
        return cache

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    # Windows: derive a per-user directory from the first candidate whose
    # environment variables are all present.
    app_data = 'Application Data'  # XXX this may be locale-specific!
    app_homes = [
        (('APPDATA',), None),  # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),  # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break  # a required variable is missing; try the next option
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')

    # Bug fix: the message used to read "enviroment" (typo).
    raise RuntimeError(
        "Please set the PYTHON_EGG_CACHE environment variable"
    )
  1107 +
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Every run of characters other than letters, digits and '.' collapses
    to a single '-'.
    """
    cleaner = re.compile(r'[^A-Za-z0-9.]+')
    return cleaner.sub('-', name)
  1114 +
  1115 +
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots; every remaining run of characters other than
    letters, digits and '.' is condensed to a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub(r'[^A-Za-z0-9.]+', '-', dotted)
  1124 +
  1125 +
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Runs of characters other than letters, digits and '.' become single
    underscores, and the result is always lowercased.
    """
    normalized = re.sub(r'[^A-Za-z0-9.]+', '_', extra)
    return normalized.lower()
  1133 +
  1134 +
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
  1141 +
  1142 +
  1143 +
  1144 +
  1145 +
  1146 +
  1147 +
  1148 +
# Names permitted in PEP 426 environment markers, keyed by "module" part;
# each value lists the attributes allowed on that name (an empty list
# means the key is used as a bare name).
_marker_names = {
    'os': ['name'], 'sys': ['platform'],
    'platform': ['version','machine','python_implementation'],
    'python_version': [], 'python_full_version': [], 'extra':[],
}
  1154 +
# Value suppliers for each marker variable; every entry is a zero-argument
# callable so the actual value is looked up lazily at evaluation time.
_marker_values = {
    'os_name': lambda: os.name,
    'sys_platform': lambda: sys.platform,
    'python_full_version': lambda: sys.version.split()[0],
    'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]),
    'platform_version': lambda: _platinfo('version'),
    'platform_machine': lambda: _platinfo('machine'),
    'python_implementation': lambda: _platinfo('python_implementation') or _pyimp(),
}
  1164 +
def _platinfo(attr):
    """Return ``platform.<attr>()``, or '' when the platform module or the
    requested attribute is unavailable."""
    try:
        import platform
    except ImportError:
        return ''
    default = lambda: ''
    fn = getattr(platform, attr, default)
    return fn()
  1171 +
def _pyimp():
    """Best-effort name of the running Python implementation."""
    if sys.platform == 'cli':
        return 'IronPython'
    if sys.platform.startswith('java'):
        return 'Jython'
    if '__pypy__' in sys.builtin_module_names:
        return 'PyPy'
    return 'CPython'
  1181 +
def invalid_marker(text):
    """Validate `text` as a PEP 426 environment marker.

    Returns the ``SyntaxError`` raised by ``evaluate_marker`` when `text`
    is invalid, or False when it is valid.
    """
    try:
        evaluate_marker(text)
    except SyntaxError:
        return sys.exc_info()[1]
    else:
        return False
  1189 +
def evaluate_marker(text, extra=None, _ops={}):
    """Evaluate a PEP 426 environment marker; SyntaxError if marker is invalid"""
    # NOTE: `_ops` is a deliberate mutable default -- it is a lazily
    # populated, call-to-call cache of the supported CST node handlers and
    # comparison operators, filled on the first invocation only.

    if not _ops:

        from token import NAME, STRING
        import token, symbol, operator

        def and_test(nodelist):
            # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
            return reduce(operator.and_, [interpret(nodelist[i]) for i in range(1,len(nodelist),2)])

        def test(nodelist):
            # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
            return reduce(operator.or_, [interpret(nodelist[i]) for i in range(1,len(nodelist),2)])

        def atom(nodelist):
            t = nodelist[1][0]
            if t == token.LPAR:
                if nodelist[2][0] == token.RPAR:
                    raise SyntaxError("Empty parentheses")
                return interpret(nodelist[2])
            raise SyntaxError("Language feature not supported in environment markers")

        def comparison(nodelist):
            if len(nodelist)>4:
                raise SyntaxError("Chained comparison not allowed in environment markers")
            comp = nodelist[2][1]
            cop = comp[1]
            if comp[0] == NAME:
                # Two-token comparison operators: 'not in' and 'is not'.
                if len(nodelist[2]) == 3:
                    if cop == 'not':
                        cop = 'not in'
                    else:
                        cop = 'is not'
            try:
                cop = _ops[cop]
            except KeyError:
                raise SyntaxError(repr(cop)+" operator not allowed in environment markers")
            return cop(evaluate(nodelist[1]), evaluate(nodelist[3]))

        _ops.update({
            symbol.test: test, symbol.and_test: and_test, symbol.atom: atom,
            symbol.comparison: comparison, 'not in': lambda x,y: x not in y,
            'in': lambda x,y: x in y, '==': operator.eq, '!=': operator.ne,
        })
        # Grammars with a distinct 'or_test' node route it through test().
        if hasattr(symbol,'or_test'):
            _ops[symbol.or_test] = test

    def interpret(nodelist):
        # Collapse single-child chains down to the interesting node.
        while len(nodelist)==2: nodelist = nodelist[1]
        try:
            op = _ops[nodelist[0]]
        except KeyError:
            raise SyntaxError("Comparison or logical expression expected")
            # NOTE(review): the raise below appears unreachable -- the
            # statement above always fires first.
            raise SyntaxError("Language feature not supported in environment markers: "+symbol.sym_name[nodelist[0]])
        return op(nodelist)

    def evaluate(nodelist):
        # Collapse single-child chains, then resolve a NAME via
        # _marker_values or accept a plain quoted string literal.
        while len(nodelist)==2: nodelist = nodelist[1]
        kind = nodelist[0]
        name = nodelist[1]
        #while len(name)==2: name = name[1]
        if kind==NAME:
            try:
                op = _marker_values[name]
            except KeyError:
                raise SyntaxError("Unknown name %r" % name)
            return op()
        if kind==STRING:
            s = nodelist[1]
            # Reject triple-quoted strings and escape sequences.
            if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \
                    or '\\' in s:
                raise SyntaxError(
                    "Only plain strings allowed in environment markers")
            return s[1:-1]
        raise SyntaxError("Language feature not supported in environment markers")

    # Parse with the (Python 2) `parser` module and walk the concrete
    # syntax tree tuple it produces.
    import parser
    return interpret(parser.expr(text).totuple(1)[1])
  1270 +
  1271 +
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by subclasses that know about eggs (see EggProvider);
    # None here means "no egg metadata available".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info,name))

    def get_metadata(self, name):
        # No egg metadata directory means no metadata at all.
        if not self.egg_info:
            return ""
        return self._get(self._fn(self.egg_info,name))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self,resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self,name):
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))

    def resource_listdir(self,resource_name):
        return self._listdir(self._fn(self.module_path,resource_name))

    def metadata_listdir(self,name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info,name))
        return []

    def run_script(self,script_name,namespace):
        # Execute the named script from the egg's 'scripts/' metadata in
        # `namespace`, mimicking normal script execution (__file__ is set,
        # linecache is primed so tracebacks show source when the script is
        # only available inside a zip).
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n'.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            execfile(script_filename, namespace, namespace)
        else:
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text,script_filename,'exec')
            exec script_code in namespace, namespace

    # The _has/_isdir/_listdir/_get primitives below are the loader-specific
    # hooks; concrete providers registered for a loader type override them.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto `base` using os.sep.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
  1365 +
  1366 +register_loader_type(object, NullProvider)
  1367 +
  1368 +
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Our metadata may be nested inside a "basket" of multiple eggs,
        # so walk upward from module_path (rather than .archive) until a
        # component ending in '.egg' is found; record its name, its
        # EGG-INFO path and its root directory.
        current = self.module_path
        previous = None
        while current != previous:
            if current.lower().endswith('.egg'):
                self.egg_name = os.path.basename(current)
                self.egg_info = os.path.join(current, 'EGG-INFO')
                self.egg_root = current
                break
            previous = current
            current, _tail = os.path.split(current)
  1389 +
  1390 +
  1391 +
  1392 +
  1393 +
  1394 +
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _get(self, path):
        """Read and return the file's bytes, always closing the handle."""
        handle = open(path, 'rb')
        try:
            return handle.read()
        finally:
            handle.close()

    def get_resource_stream(self, manager, resource_name):
        """Open the named resource as a binary file object."""
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)
  1416 +
  1417 +register_loader_type(type(None), DefaultProvider)
  1418 +
  1419 +
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Nothing to set up -- deliberately skips NullProvider.__init__.
        pass
  1430 +
  1431 +empty_provider = EmptyProvider()
  1432 +
  1433 +
  1434 +
  1435 +
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily-built list of resources that must be extracted eagerly
    # (native libs etc.); see _get_eager_resources().
    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # zipimport keeps a per-archive directory of zip entries; reuse it.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the egg cache and return its real path.

        Raises NotImplementedError for plain zips (no egg name available).
        """
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        # If this resource is eager, extract ALL eager resources first.
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Directories are extracted recursively, one child at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last) # return the extracted directory name

        # Decode the zip entry's DOS date/time fields into a timestamp so
        # we can detect an already-up-to-date extracted copy.
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            # Write to a temp name first, then rename into place, so
            # concurrent extractions never see a half-written file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt': # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error() # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        # Union of the names listed in native_libs.txt and
        # eager_resources.txt metadata, computed once and cached.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once) a directory index: parent subpath -> child names.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
  1574 +
  1575 +register_loader_type(zipimport.zipimporter, ZipProvider)
  1576 +
  1577 +
  1578 +
  1579 +
  1580 +
  1581 +
  1582 +
  1583 +
  1584 +
  1585 +
  1586 +
  1587 +
  1588 +
  1589 +
  1590 +
  1591 +
  1592 +
  1593 +
  1594 +
  1595 +
  1596 +
  1597 +
  1598 +
  1599 +
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Filesystem path of the standalone PKG-INFO file.
        self.path = path

    def has_metadata(self, name):
        """Only 'PKG-INFO' is ever available."""
        return name == 'PKG-INFO'

    def get_metadata(self, name):
        """Return the PKG-INFO file's contents; KeyError for any other name."""
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        # Bug fix: the original left the file object unclosed, leaking the
        # descriptor until garbage collection.
        f = open(self.path, 'rU')
        try:
            return f.read()
        finally:
            f.close()

    def get_metadata_lines(self, name):
        """Return the named metadata as lines (via ``yield_lines``)."""
        return yield_lines(self.get_metadata(name))
  1625 +
  1626 +
  1627 +
  1628 +
  1629 +
  1630 +
  1631 +
  1632 +
  1633 +
  1634 +
  1635 +
  1636 +
  1637 +
  1638 +
  1639 +
  1640 +
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the package/egg base directory; `egg_info` is the
        # directory holding the metadata files.
        self.egg_info = egg_info
        self.module_path = path
  1663 +
  1664 +
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Mirror ZipProvider.__init__, but drive everything from the
        # zipimporter instead of a loaded module.
        self.loader = importer
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive + os.sep
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
  1679 +
  1680 +
  1681 +
  1682 +_declare_state('dict', _distribution_finders = {})
  1683 +
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler); `distribution_finder` is a callable that, given a path
    item and the importer instance, yields the ``Distribution`` instances
    found on that path item.  See ``pkg_resources.find_on_path`` for an
    example.
    """
    _distribution_finders[importer_type] = distribution_finder
  1692 +
  1693 +
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`

    Dispatches to the finder registered for `path_item`'s importer type;
    the `only` flag is forwarded unchanged to that finder.
    """
    importer = get_importer(path_item)
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)
  1699 +
def find_in_zip(importer, path_item, only=False):
    """Distribution finder for zip archives handled by zipimport (.egg zips)."""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the archive itself is a distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        return # don't yield nested distros
    # also scan the archive root for bundled .egg files
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            # NOTE(review): `only` is not propagated into the recursive call,
            # so nested scans always recurse — presumably intentional; matches
            # upstream pkg_resources of this vintage.
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_in_zip)
  1713 +
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # On first call this rebinds the module-level name to the real class,
    # so subsequent calls bypass this function entirely.  (cStringIO and
    # StringIO are Python 2 modules.)
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
  1722 +
def find_nothing(importer, path_item, only=False):
    """Fallback finder: importer types with no specific finder yield nothing."""
    return ()
# Default: any importer type without a more specific finder yields nothing.
register_finder(object,find_nothing)
  1726 +
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # a single .egg-info file is the PKG-INFO itself
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # zipped or unpacked egg: delegate to the matching finder
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link: the first non-blank line names the real
                    # location.  NOTE: `file()` is the Python 2 builtin open.
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
                            yield item
                        break
register_finder(pkgutil.ImpImporter, find_on_path)

# Registries for namespace-package support, saved/restored as module state:
# importer type -> handler callable, and package name -> child namespace list.
_declare_state('dict', _namespace_handlers = {})
_declare_state('dict', _namespace_packages = {})
  1766 +
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages.

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler).  `namespace_handler` is a callable of the form::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    A namespace handler is only invoked after the importer has agreed it can
    handle the path item in question.  It should return a subpath only when
    the module's ``__path__`` does not already contain an equivalent one; see
    ``pkg_resources.file_ns_handler`` for a reference implementation.
    """
    _namespace_handlers[importer_type] = namespace_handler
  1783 +
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        return None # not a usable sys.path entry
    loader = importer.find_module(packageName)
    if loader is None:
        return None # this entry has no such package
    module = sys.modules.get(packageName)
    if module is None:
        # create an empty namespace module and attach it to its parent
        module = sys.modules[packageName] = imp.new_module(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # preserve the accumulated __path__ across load_module(), which
        # may reset it
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
  1804 +
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Serialize against concurrent imports; released in the finally below.
    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # declare the parent namespace first so its __path__ exists
            # to search for this child
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()
  1835 +
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent,()):
            # add the new path entry, then recurse into child namespaces
            subpath = _handle_ns(package, path_item)
            if subpath: fixup_namespace_packages(subpath,package)
    finally:
        imp.release_lock()
  1845 +
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer."""
    # Candidate subdirectory for the last component of the package name.
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(entry) == normalized for entry in module.__path__
    )
    if not already_present:
        # only return the path if it's not already there
        return subpath
  1857 +
# Plain directories and zip archives share the filename-based handler.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
  1860 +
  1861 +
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importer types that cannot host packages."""
    return None
  1864 +
# Default: unknown importer types contribute no namespace subpaths.
register_namespace_handler(object,null_ns_handler)
  1866 +
  1867 +
def normalize_path(filename):
    """Return `filename` with symlinks resolved and case normalized,
    suitable for path comparison."""
    return os.path.normcase(os.path.realpath(filename))
  1871 +
def _normalize_cached(filename,_cache={}):
    # Memoized normalize_path; the mutable default argument is intentional —
    # it persists across calls and serves as the cache.
    if filename in _cache:
        return _cache[filename]
    result = _cache[filename] = normalize_path(filename)
    return result
  1878 +
  1879 +def _set_parent_ns(packageName):
  1880 + parts = packageName.split('.')
  1881 + name = parts.pop()
  1882 + if parts:
  1883 + parent = '.'.join(parts)
  1884 + setattr(sys.modules[parent], name, sys.modules[packageName])
  1885 +
  1886 +
def yield_lines(strs):
    """Yield stripped, non-empty, non-comment lines from a ``basestring``
    or from a (possibly nested) sequence of them."""
    if isinstance(strs, basestring):
        for raw in strs.splitlines():
            stripped = raw.strip()
            # skip blank lines and comment-only lines
            if stripped and not stripped.startswith('#'):
                yield stripped
    else:
        for element in strs:
            for line in yield_lines(element):
                yield line
  1898 +
# Tokenizers used by parse_requirements() to scan requirement strings;
# each name is bound directly to the compiled pattern's .match method.
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Egg basename structure: name[-version[-pyX.Y[-platform]]]
EGG_NAME = re.compile(
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match

# Version-string tokenizer and tag canonicalization for parse_version():
# pre/preview/rc sort as 'c' (candidate), '-' as a 'final-' patch marker,
# and 'dev' as '@' so it sorts before any other pre-release tag.
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
  1915 +
def _parse_version_parts(s):
    """Yield comparable sub-strings for one version string (parse_version helper)."""
    for piece in component_re.split(s):
        # canonicalize pre-release tags (pre/preview/rc/dev/-) first
        piece = replace(piece, piece)
        if not piece or piece == '.':
            continue
        if piece[:1] in '0123456789':
            yield piece.zfill(8)  # pad for numeric comparison
        else:
            yield '*'+piece

    yield '*final' # ensure that alpha/beta/candidate are before final
  1927 +
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            # alpha tag: clean up preceding parts before appending it
            if part<'*final':   # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        # `attrs` is the dotted attribute path inside the module; `extras`
        # are validated by round-tripping through Requirement.parse.
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # inverse of parse(): "name = module:attr.path [extra1,extra2]"
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        """Import and return the advertised object, resolving requirements
        first unless `require` is false.

        Raises ImportError if the attribute path cannot be traversed.
        """
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        """Add this entry point's distribution requirements to the master
        working set.

        Raises UnknownExtra when extras are requested but no distribution
        is attached to resolve them against.
        """
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # Use an explicit loop instead of map(): map() is lazy on Python 3,
        # so the side-effecting adds would silently never run.  Behavior is
        # identical on Python 2, where map() evaluated eagerly.
        for dist in working_set.resolve(
                self.dist.requires(self.extras), env, installer):
            working_set.add(dist)

    #@classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                # validate the extras list by parsing it as a requirement;
                # version specs are not allowed here
                value,extras = value.split('[',1)
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    # pre-decorator classmethod spelling, kept for very old Pythons
    parse = classmethod(parse)

    #@classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group into a dict of name -> EntryPoint."""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    parse_group = classmethod(parse_group)

    #@classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups from a dict or INI-style text."""
        if isinstance(data,dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # content before any [section] header is only legal if blank
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)
  2086 +
  2087 +
  2088 +
  2089 +
  2090 +
  2091 +
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    def __init__(self,
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # when omitted, _version stays unset so the `version` property
            # can lazily read it from PKG-INFO
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    #@classmethod
    def from_location(cls,location,basename,metadata=None,**kw):
        # Parse name/version/pyver/platform out of an .egg or .egg-info
        # basename; any other extension yields an "Unknown" distribution.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    from_location = classmethod(from_location)

    # Composite sort key: version, precedence, key, then shorter locations
    # preferred (note the negated length).
    hashcmp = property(
        lambda self: (
            getattr(self,'parsed_version',()), self.precedence, self.key,
            -len(self.location or ''), self.location, self.py_version,
            self.platform
        )
    )
    # NOTE(review): compares hashcmp against `other` directly, not
    # other.hashcmp — callers appear to pass comparable tuples; confirm
    # before changing.  (cmp is Python 2 only.)
    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    #@property
    def key(self):
        # lowercased project name, cached on first access
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    key = property(key)

    #@property
    def parsed_version(self):
        # parse_version() result, cached on first access
        try:
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)
            return pv

    parsed_version = property(parsed_version)

    #@property
    def version(self):
        # falls back to the 'Version:' header in PKG-INFO when no version
        # was supplied at construction time
        try:
            return self._version
        except AttributeError:
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                raise ValueError(
                    "Missing 'Version:' header and/or PKG-INFO file", self
                )
    version = property(version)

    #@property
    def _dep_map(self):
        # Lazy map of extra-name (or None) -> list of Requirements, built
        # from requires.txt/depends.txt.  "extra:marker" section names are
        # emptied when the environment marker is invalid or false.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            extra, marker = extra.split(':',1)
                            if invalid_marker(marker):
                                reqs=[] # XXX warn
                            elif not evaluate_marker(marker):
                                reqs=[]
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None,()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self,name):
        # yield the lines of metadata file `name`, or nothing if absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            # keep declared namespace packages in sync with the new entry
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-'+self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self,self.location)
        else:
            return str(self)

    def __str__(self):
        # tolerate a missing/unreadable version
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        # Python 2 raise syntax; private names never delegate.
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    #@classmethod
    def from_filename(cls,filename,metadata=None, **kw):
        # normalize the path, then parse the basename for name/version info
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group,name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        # parsed from entry_points.txt and cached on first use
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # npath mirrors path with each entry normalized for comparison
        npath= [(p and _normalize_cached(p) or p) for p in path]

        bp = None
        for p, item in enumerate(npath):
            if item==nloc:
                break
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # not present and no parent directory found: append at the end
            if path is sys.path:
                self.check_version_conflict()
            path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while 1:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                p = np  # ha!

        return

    def check_version_conflict(self):
        # Warn if a top-level module of this distribution was already
        # imported from somewhere else on sys.path.
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            ):
                continue

            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        # True when a version is known; warns instead of raising otherwise
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for "+repr(self))
            return False
        return True

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        for attr in (
            'project_name', 'version', 'py_version', 'platform', 'location',
            'precedence'
        ):
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    #@property
    def extras(self):
        # names of all declared extras (the non-None _dep_map keys)
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
  2383 +
  2384 +
def issue_warning(*args,**kw):
    """Issue a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # ran off the top of the stack; every frame is in this module
        pass
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
  2397 +
  2398 +
  2399 +
  2400 +
  2401 +
  2402 +
  2403 +
  2404 +
  2405 +
  2406 +
  2407 +
  2408 +
  2409 +
  2410 +
  2411 +
  2412 +
  2413 +
  2414 +
  2415 +
  2416 +
  2417 +
  2418 +
  2419 +
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Scan a comma-separated list of ITEM tokens starting at line[p:],
        # following \-continuations onto subsequent lines, until TERMINATOR
        # matches.  Returns (current line, new position, collected items).

        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                try:
                    # Python 2 iterator protocol (.next)
                    line = lines.next(); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end()   # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        # optional "[extra1,extra2,...]" immediately after the name
        match = OBRACKET(line,p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        # version specifiers (e.g. ">=1.0,<2.0") up to end of line/comment
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
  2479 +
  2480 +
  2481 +def _sort_dists(dists):
  2482 + tmp = [(dist.hashcmp,dist) for dist in dists]
  2483 + tmp.sort()
  2484 + dists[::-1] = [d for hc,d in tmp]
  2485 +
  2486 +
  2487 +
  2488 +
  2489 +
  2490 +
  2491 +
  2492 +
  2493 +
  2494 +
  2495 +
  2496 +
  2497 +
  2498 +
  2499 +
  2500 +
  2501 +
class Requirement:
    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        # index rows: (parsed version, state-machine row for op, op, raw ver),
        # sorted by parsed version for the __contains__ scan below
        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
        index.sort()
        self.specs = [(op,ver) for parsed,trans,op,ver in index]
        self.index, self.extras = index, tuple(map(safe_extra,extras))
        # equality/hash key: canonical name, (op, parsed-version) pairs, extras
        self.hashCmp = (
            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
            frozenset(self.extras)
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        # e.g. "Name[extra1,extra2]>=1.0,<2.0"
        specs = ','.join([''.join(s) for s in self.specs])
        extras = ','.join(self.extras)
        if extras: extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, specs)

    def __eq__(self,other):
        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp

    def __contains__(self,item):
        # `item` may be a Distribution, a version string, or a parsed version
        if isinstance(item,Distribution):
            if item.key != self.key: return False
            if self.index: item = item.parsed_version  # only get if we need it
        elif isinstance(item,basestring):
            item = parse_version(item)
        # Walk the sorted specs through the operator state machine:
        # cmp(item, parsed) is -1/0/1 and indexes the 3-char transition row
        # ('T'/'F' decide immediately; '+'/'-' set a tentative result).
        last = None
        for parsed,trans,op,ver in self.index:
            action = trans[cmp(item,parsed)]
            if action=='F': return False
            elif action=='T': return True
            elif action=='+': last = True
            elif action=='-' or last is None: last = False
        if last is None: last = True    # no rules encountered
        return last

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    #@staticmethod
    def parse(s):
        # `s` must contain exactly one requirement specification
        reqs = list(parse_requirements(s))
        if reqs:
            if len(reqs)==1:
                return reqs[0]
            raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)

    parse = staticmethod(parse)
  2558 +
# Transition table used by Requirement.__contains__.  For each operator the
# three characters are the action taken when cmp(candidate, spec) is
# 0 (equal, index 0), 1 (greater, index 1) or -1 (less, string index -1):
# 'T'/'F' return True/False immediately, '+'/'-' set the tentative result,
# and '.' leaves it unchanged.
state_machine = {
    # =><
    '<' : '--T',
    '<=': 'T-T',
    '>' : 'F+F',
    '>=': 'T+F',
    '==': 'T..',
    '!=': 'F++',
}
  2568 +
  2569 +
  2570 +def _get_mro(cls):
  2571 + """Get an mro for a type or classic class"""
  2572 + if not isinstance(cls,type):
  2573 + class cls(cls,object): pass
  2574 + return cls.__mro__[1:]
  2575 + return cls.__mro__
  2576 +
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`."""
    # Walk `ob`'s MRO and return the first registered entry; implicitly
    # returns None when nothing matches.
    for klass in _get_mro(getattr(ob, '__class__', type(ob))):
        if klass in registry:
            return registry[klass]
  2582 +
  2583 +
def ensure_directory(path):
    """Create the parent directory of `path` if it does not already exist."""
    parent = os.path.dirname(path)
    if not os.path.isdir(parent):
        os.makedirs(parent)
  2589 +
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs.

    Each ``section`` is a stripped version of the section header
    ("[section]") and each ``content`` is a list of stripped lines,
    excluding blank and comment-only lines.  Lines appearing before the
    first header are yielded in an initial section of ``None``.
    """
    current, collected = None, []
    for line in yield_lines(s):
        if not line.startswith("["):
            collected.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous section (skip the empty leading one)
        if current or collected:
            yield current, collected
        current, collected = line[1:-1].strip(), []

    # wrap up last segment
    yield current, collected
  2614 +
def _mkstemp(*args,**kw):
    """Call ``tempfile.mkstemp`` with sandboxing temporarily disabled.

    ``os.open`` is monkeypatched to the module-level ``os_open``
    (presumably the original, un-sandboxed ``os.open`` saved elsewhere
    in this file — TODO confirm) for the duration of the call, then
    restored in ``finally`` even if ``mkstemp`` raises.
    """
    from tempfile import mkstemp
    old_open = os.open
    try:
        os.open = os_open # temporarily bypass sandboxing
        return mkstemp(*args,**kw)
    finally:
        os.open = old_open # and then put it back
  2623 +
  2624 +
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
    """Copy every public attribute of the shared ``_manager`` into
    namespace ``g`` — this is how the module-level resource API is
    exported as bound methods of the singleton manager.
    """
    for name in dir(_manager):
        if not name.startswith('_'):
            g[name] = getattr(_manager, name)
# Publish the manager's public API as module globals right away.
_initialize(globals())
  2632 +
# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())
try:
    # Does the main program list any requirements?
    from __main__ import __requires__
except ImportError:
    pass # No: just use the default working set based on sys.path
else:
    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
    try:
        working_set.require(__requires__)
    except VersionConflict: # try it without defaults already on sys.path
        working_set = WorkingSet([]) # by starting with an empty path
        for dist in working_set.resolve(
            parse_requirements(__requires__), Environment()
        ):
            working_set.add(dist)
        for entry in sys.path: # add any missing entries from sys.path
            if entry not in working_set.entries:
                working_set.add_entry(entry)
        sys.path[:] = working_set.entries # then copy back to sys.path

# Module-level convenience API: each name delegates to the master set.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
# NOTE(review): the next line relies on Python 2's eager ``map()``; under
# Python 3 the map object is never consumed and the entries would not be
# re-added — confirm the target interpreter before reusing this module.
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
  2665 +
... ...