Initial commit
228
venv/lib/python3.8/site-packages/setuptools/__init__.py
Normal file
@@ -0,0 +1,228 @@
"""Extensions to the 'distutils' for large or complex distributions"""

import os
import sys
import functools
import distutils.core
import distutils.filelist
import re
from distutils.errors import DistutilsOptionError
from distutils.util import convert_path
from fnmatch import fnmatchcase

from ._deprecation_warning import SetuptoolsDeprecationWarning

from setuptools.extern.six import PY3, string_types
from setuptools.extern.six.moves import filter, map

import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution, Feature
from setuptools.depends import Require
from . import monkey

__metaclass__ = type


__all__ = [
    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
    'SetuptoolsDeprecationWarning',
    'find_packages'
]

if PY3:
    __all__.append('find_namespace_packages')

__version__ = setuptools.version.__version__

bootstrap_install_from = None

# If we run 2to3 on .py files, should we also convert docstrings?
# Default: yes; assume that we can detect doctests reliably
run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']


class PackageFinder:
    """
    Generate a list of all Python packages found within a directory
    """

    @classmethod
    def find(cls, where='.', exclude=(), include=('*',)):
        """Return a list of all Python packages found within directory 'where'

        'where' is the root directory which will be searched for packages.  It
        should be supplied as a "cross-platform" (i.e. URL-style) path; it will
        be converted to the appropriate local path syntax.

        'exclude' is a sequence of package names to exclude; '*' can be used
        as a wildcard in the names, such that 'foo.*' will exclude all
        subpackages of 'foo' (but not 'foo' itself).

        'include' is a sequence of package names to include.  If it's
        specified, only the named packages will be included.  If it's not
        specified, all found packages will be included.  'include' can contain
        shell style wildcard patterns just like 'exclude'.
        """

        return list(cls._find_packages_iter(
            convert_path(where),
            cls._build_filter('ez_setup', '*__pycache__', *exclude),
            cls._build_filter(*include)))

    @classmethod
    def _find_packages_iter(cls, where, exclude, include):
        """
        All the packages found in 'where' that pass the 'include' filter, but
        not the 'exclude' filter.
        """
        for root, dirs, files in os.walk(where, followlinks=True):
            # Copy dirs to iterate over it, then empty dirs.
            all_dirs = dirs[:]
            dirs[:] = []

            for dir in all_dirs:
                full_path = os.path.join(root, dir)
                rel_path = os.path.relpath(full_path, where)
                package = rel_path.replace(os.path.sep, '.')

                # Skip directory trees that are not valid packages
                if ('.' in dir or not cls._looks_like_package(full_path)):
                    continue

                # Should this package be included?
                if include(package) and not exclude(package):
                    yield package

                # Keep searching subdirectories, as there may be more packages
                # down there, even if the parent was excluded.
                dirs.append(dir)

    @staticmethod
    def _looks_like_package(path):
        """Does a directory look like a package?"""
        return os.path.isfile(os.path.join(path, '__init__.py'))

    @staticmethod
    def _build_filter(*patterns):
        """
        Given a list of patterns, return a callable that will be true only if
        the input matches at least one of the patterns.
        """
        return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)


class PEP420PackageFinder(PackageFinder):
    @staticmethod
    def _looks_like_package(path):
        return True


find_packages = PackageFinder.find

if PY3:
    find_namespace_packages = PEP420PackageFinder.find


def _install_setup_requires(attrs):
    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patch `distutils.core.Distribution`.
    dist = distutils.core.Distribution(dict(
        (k, v) for k, v in attrs.items()
        if k in ('dependency_links', 'setup_requires')
    ))
    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        dist.fetch_build_eggs(dist.setup_requires)


def setup(**attrs):
    # Make sure we have any requirements needed to interpret 'attrs'.
    _install_setup_requires(attrs)
    return distutils.core.setup(**attrs)

setup.__doc__ = distutils.core.setup.__doc__


_Command = monkey.get_unpatched(distutils.core.Command)


class Command(_Command):
    __doc__ = _Command.__doc__

    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        _Command.__init__(self, dist)
        vars(self).update(kw)

    def _ensure_stringlike(self, option, what, default=None):
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, string_types):
            raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
                                       % (option, what, val))
        return val

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, string_types):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, string_types) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)"
                    % (option, val))

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd


def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    results = (
        os.path.join(base, file)
        for base, dirs, files in os.walk(path, followlinks=True)
        for file in files
    )
    return filter(os.path.isfile, results)


def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    files = _find_all_simple(dir)
    if dir == os.curdir:
        make_rel = functools.partial(os.path.relpath, start=dir)
        files = map(make_rel, files)
    return list(files)


# Apply monkey patches
monkey.patch_all()
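As a usage sketch only (not part of this commit): a hypothetical setup.py consuming the find_packages API defined above; the project name and exclude patterns are illustrative assumptions.

# Hypothetical setup.py -- illustrates the public API above, nothing more.
from setuptools import setup, find_packages

setup(
    name="example",  # hypothetical project name
    version="0.1",
    # find_packages() walks the current directory and returns every
    # directory containing an __init__.py, minus the excluded patterns.
    packages=find_packages(exclude=("tests", "tests.*")),
)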
Binary file not shown.
@@ -0,0 +1,7 @@
class SetuptoolsDeprecationWarning(Warning):
    """
    Base class for warning deprecations in ``setuptools``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """
73
venv/lib/python3.8/site-packages/setuptools/_imp.py
Normal file
@@ -0,0 +1,73 @@
"""
Re-implementation of find_module and get_frozen_object
from the deprecated imp module.
"""

import os
import importlib.util
import importlib.machinery

from .py34compat import module_from_spec


PY_SOURCE = 1
PY_COMPILED = 2
C_EXTENSION = 3
C_BUILTIN = 6
PY_FROZEN = 7


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""
    spec = importlib.util.find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
        spec = importlib.util.spec_from_loader('__init__.py', spec.loader)

    kind = -1
    file = None
    static = isinstance(spec.loader, type)
    if spec.origin == 'frozen' or static and issubclass(
            spec.loader, importlib.machinery.FrozenImporter):
        kind = PY_FROZEN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.origin == 'built-in' or static and issubclass(
            spec.loader, importlib.machinery.BuiltinImporter):
        kind = C_BUILTIN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.has_location:
        path = spec.origin
        suffix = os.path.splitext(path)[1]
        mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'

        if suffix in importlib.machinery.SOURCE_SUFFIXES:
            kind = PY_SOURCE
        elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
            kind = PY_COMPILED
        elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
            kind = C_EXTENSION

        if kind in {PY_SOURCE, PY_COMPILED}:
            file = open(path, mode)
    else:
        path = None
        suffix = mode = ''

    return file, path, (suffix, mode, kind)


def get_frozen_object(module, paths=None):
    spec = importlib.util.find_spec(module, paths)
    if not spec:
        raise ImportError("Can't find %s" % module)
    return spec.loader.get_code(module)


def get_module(module, paths, info):
    spec = importlib.util.find_spec(module, paths)
    if not spec:
        raise ImportError("Can't find %s" % module)
    return module_from_spec(spec)
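A brief usage sketch (not part of the commit), assuming the helpers above are in scope; it mirrors the old imp.find_module() return contract, and 'json' is an arbitrary stdlib example.

# find_module returns the imp-style triple (file, path, (suffix, mode, kind)).
file, path, (suffix, mode, kind) = find_module('json')
print(path, kind == PY_SOURCE)  # filesystem path of json/__init__.py, True
if file is not None:
    file.close()  # the caller owns the handle opened for PY_SOURCE/PY_COMPILED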
Binary file not shown.
@@ -0,0 +1,488 @@
"""
An OrderedSet is a custom MutableSet that remembers its order, so that every
entry has an index that can be looked up.

Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
and released under the MIT license.
"""
import itertools as it
from collections import deque

try:
    # Python 3
    from collections.abc import MutableSet, Sequence
except ImportError:
    # Python 2.7
    from collections import MutableSet, Sequence

SLICE_ALL = slice(None)
__version__ = "3.1"


def is_iterable(obj):
    """
    Are we being asked to look up a list of things, instead of a single thing?
    We check for the `__iter__` attribute so that this can cover types that
    don't have to be known by this module, such as NumPy arrays.

    Strings, however, should be considered as atomic values to look up, not
    iterables. The same goes for tuples, since they are immutable and therefore
    valid entries.

    We don't need to check for the Python 2 `unicode` type, because it doesn't
    have an `__iter__` attribute anyway.
    """
    return (
        hasattr(obj, "__iter__")
        and not isinstance(obj, str)
        and not isinstance(obj, tuple)
    )


class OrderedSet(MutableSet, Sequence):
    """
    An OrderedSet is a custom MutableSet that remembers its order, so that
    every entry has an index that can be looked up.

    Example:
        >>> OrderedSet([1, 1, 2, 3, 2])
        OrderedSet([1, 2, 3])
    """

    def __init__(self, iterable=None):
        self.items = []
        self.map = {}
        if iterable is not None:
            self |= iterable

    def __len__(self):
        """
        Returns the number of unique elements in the ordered set

        Example:
            >>> len(OrderedSet([]))
            0
            >>> len(OrderedSet([1, 2]))
            2
        """
        return len(self.items)

    def __getitem__(self, index):
        """
        Get the item at a given index.

        If `index` is a slice, you will get back that slice of items, as a
        new OrderedSet.

        If `index` is a list or a similar iterable, you'll get a list of
        items corresponding to those indices. This is similar to NumPy's
        "fancy indexing". The result is not an OrderedSet because you may ask
        for duplicate indices, and the number of elements returned should be
        the number of elements asked for.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset[1]
            2
        """
        if isinstance(index, slice) and index == SLICE_ALL:
            return self.copy()
        elif is_iterable(index):
            return [self.items[i] for i in index]
        elif hasattr(index, "__index__") or isinstance(index, slice):
            result = self.items[index]
            if isinstance(result, list):
                return self.__class__(result)
            else:
                return result
        else:
            raise TypeError("Don't know how to index an OrderedSet by %r" % index)

    def copy(self):
        """
        Return a shallow copy of this object.

        Example:
            >>> this = OrderedSet([1, 2, 3])
            >>> other = this.copy()
            >>> this == other
            True
            >>> this is other
            False
        """
        return self.__class__(self)

    def __getstate__(self):
        if len(self) == 0:
            # The state can't be an empty list.
            # We need to return a truthy value, or else __setstate__ won't be run.
            #
            # This could have been done more gracefully by always putting the state
            # in a tuple, but this way is backwards- and forwards- compatible with
            # previous versions of OrderedSet.
            return (None,)
        else:
            return list(self)

    def __setstate__(self, state):
        if state == (None,):
            self.__init__([])
        else:
            self.__init__(state)

    def __contains__(self, key):
        """
        Test if the item is in this ordered set

        Example:
            >>> 1 in OrderedSet([1, 3, 2])
            True
            >>> 5 in OrderedSet([1, 3, 2])
            False
        """
        return key in self.map

    def add(self, key):
        """
        Add `key` as an item to this OrderedSet, then return its index.

        If `key` is already in the OrderedSet, return the index it already
        had.

        Example:
            >>> oset = OrderedSet()
            >>> oset.append(3)
            0
            >>> print(oset)
            OrderedSet([3])
        """
        if key not in self.map:
            self.map[key] = len(self.items)
            self.items.append(key)
        return self.map[key]

    append = add

    def update(self, sequence):
        """
        Update the set with the given iterable sequence, then return the index
        of the last element inserted.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.update([3, 1, 5, 1, 4])
            4
            >>> print(oset)
            OrderedSet([1, 2, 3, 5, 4])
        """
        item_index = None
        try:
            for item in sequence:
                item_index = self.add(item)
        except TypeError:
            raise ValueError(
                "Argument needs to be an iterable, got %s" % type(sequence)
            )
        return item_index

    def index(self, key):
        """
        Get the index of a given entry, raising a KeyError if it's not
        present.

        `key` can be an iterable of entries that is not a string, in which case
        this returns a list of indices.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.index(2)
            1
        """
        if is_iterable(key):
            return [self.index(subkey) for subkey in key]
        return self.map[key]

    # Provide some compatibility with pd.Index
    get_loc = index
    get_indexer = index

    def pop(self):
        """
        Remove and return the last element from the set.

        Raises KeyError if the set is empty.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.pop()
            3
        """
        if not self.items:
            raise KeyError("Set is empty")

        elem = self.items[-1]
        del self.items[-1]
        del self.map[elem]
        return elem

    def discard(self, key):
        """
        Remove an element.  Do not raise an exception if absent.

        The MutableSet mixin uses this to implement the .remove() method, which
        *does* raise an error when asked to remove a non-existent item.

        Example:
            >>> oset = OrderedSet([1, 2, 3])
            >>> oset.discard(2)
            >>> print(oset)
            OrderedSet([1, 3])
            >>> oset.discard(2)
            >>> print(oset)
            OrderedSet([1, 3])
        """
        if key in self:
            i = self.map[key]
            del self.items[i]
            del self.map[key]
            for k, v in self.map.items():
                if v >= i:
                    self.map[k] = v - 1

    def clear(self):
        """
        Remove all items from this OrderedSet.
        """
        del self.items[:]
        self.map.clear()

    def __iter__(self):
        """
        Example:
            >>> list(iter(OrderedSet([1, 2, 3])))
            [1, 2, 3]
        """
        return iter(self.items)

    def __reversed__(self):
        """
        Example:
            >>> list(reversed(OrderedSet([1, 2, 3])))
            [3, 2, 1]
        """
        return reversed(self.items)

    def __repr__(self):
        if not self:
            return "%s()" % (self.__class__.__name__,)
        return "%s(%r)" % (self.__class__.__name__, list(self))

    def __eq__(self, other):
        """
        Returns true if the containers have the same items. If `other` is a
        Sequence, then order is checked, otherwise it is ignored.

        Example:
            >>> oset = OrderedSet([1, 3, 2])
            >>> oset == [1, 3, 2]
            True
            >>> oset == [1, 2, 3]
            False
            >>> oset == [2, 3]
            False
            >>> oset == OrderedSet([3, 2, 1])
            False
        """
        # In Python 2 deque is not a Sequence, so treat it as one for
        # consistent behavior with Python 3.
        if isinstance(other, (Sequence, deque)):
            # Check that this OrderedSet contains the same elements, in the
            # same order, as the other object.
            return list(self) == list(other)
        try:
            other_as_set = set(other)
        except TypeError:
            # If `other` can't be converted into a set, it's not equal.
            return False
        else:
            return set(self) == other_as_set

    def union(self, *sets):
        """
        Combines all unique items.
        Each item's order is defined by its first appearance.

        Example:
            >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
            >>> print(oset)
            OrderedSet([3, 1, 4, 5, 2, 0])
            >>> oset.union([8, 9])
            OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
            >>> oset | {10}
            OrderedSet([3, 1, 4, 5, 2, 0, 10])
        """
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        containers = map(list, it.chain([self], sets))
        items = it.chain.from_iterable(containers)
        return cls(items)

    def __and__(self, other):
        # the parent implementation of this is backwards
        return self.intersection(other)

    def intersection(self, *sets):
        """
        Returns elements in common between all sets. Order is defined only
        by the first set.

        Example:
            >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
            >>> print(oset)
            OrderedSet([1, 2, 3])
            >>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
            OrderedSet([2])
            >>> oset.intersection()
            OrderedSet([1, 2, 3])
        """
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        if sets:
            common = set.intersection(*map(set, sets))
            items = (item for item in self if item in common)
        else:
            items = self
        return cls(items)

    def difference(self, *sets):
        """
        Returns all elements that are in this set but not the others.

        Example:
            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
            OrderedSet([1, 3])
            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
            OrderedSet([1])
            >>> OrderedSet([1, 2, 3]) - OrderedSet([2])
            OrderedSet([1, 3])
            >>> OrderedSet([1, 2, 3]).difference()
            OrderedSet([1, 2, 3])
        """
        cls = self.__class__
        if sets:
            other = set.union(*map(set, sets))
            items = (item for item in self if item not in other)
        else:
            items = self
        return cls(items)

    def issubset(self, other):
        """
        Report whether another set contains this set.

        Example:
            >>> OrderedSet([1, 2, 3]).issubset({1, 2})
            False
            >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
            True
            >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
            False
        """
        if len(self) > len(other):  # Fast check for obvious cases
            return False
        return all(item in other for item in self)

    def issuperset(self, other):
        """
        Report whether this set contains another set.

        Example:
            >>> OrderedSet([1, 2]).issuperset([1, 2, 3])
            False
            >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
            True
            >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
            False
        """
        if len(self) < len(other):  # Fast check for obvious cases
            return False
        return all(item in self for item in other)

    def symmetric_difference(self, other):
        """
        Return the symmetric difference of two OrderedSets as a new set.
        That is, the new set will contain all elements that are in exactly
        one of the sets.

        Their order will be preserved, with elements from `self` preceding
        elements from `other`.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.symmetric_difference(other)
            OrderedSet([4, 5, 9, 2])
        """
        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
        diff1 = cls(self).difference(other)
        diff2 = cls(other).difference(self)
        return diff1.union(diff2)

    def _update_items(self, items):
        """
        Replace the 'items' list of this OrderedSet with a new one, updating
        self.map accordingly.
        """
        self.items = items
        self.map = {item: idx for (idx, item) in enumerate(items)}

    def difference_update(self, *sets):
        """
        Update this OrderedSet to remove items from one or more other sets.

        Example:
            >>> this = OrderedSet([1, 2, 3])
            >>> this.difference_update(OrderedSet([2, 4]))
            >>> print(this)
            OrderedSet([1, 3])

            >>> this = OrderedSet([1, 2, 3, 4, 5])
            >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
            >>> print(this)
            OrderedSet([3, 5])
        """
        items_to_remove = set()
        for other in sets:
            items_to_remove |= set(other)
        self._update_items([item for item in self.items if item not in items_to_remove])

    def intersection_update(self, other):
        """
        Update this OrderedSet to keep only items in another set, preserving
        their order in this set.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.intersection_update(other)
            >>> print(this)
            OrderedSet([1, 3, 7])
        """
        other = set(other)
        self._update_items([item for item in self.items if item in other])

    def symmetric_difference_update(self, other):
        """
        Update this OrderedSet to remove items from another set, then
        add items from the other set that were not present in this set.

        Example:
            >>> this = OrderedSet([1, 4, 3, 5, 7])
            >>> other = OrderedSet([9, 7, 1, 3, 2])
            >>> this.symmetric_difference_update(other)
            >>> print(this)
            OrderedSet([4, 5, 9, 2])
        """
        items_to_add = [item for item in other if item not in self]
        items_to_remove = set(other)
        self._update_items(
            [item for item in self.items if item not in items_to_remove] + items_to_add
        )
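Beyond the doctests above, a short sketch (not part of the commit) of the class's defining feature, the stable item-to-index mapping; the input string is an arbitrary example.

# OrderedSet keeps first-seen order and supports index lookups both ways.
letters = OrderedSet("abracadabra")  # iterates characters, deduplicating
assert list(letters) == ["a", "b", "r", "c", "d"]
assert letters.index("r") == 2 and letters[2] == "r"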
@@ -0,0 +1,27 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

__all__ = [
    "__title__",
    "__summary__",
    "__uri__",
    "__version__",
    "__author__",
    "__email__",
    "__license__",
    "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "19.2"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2019 %s" % __author__
@@ -0,0 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

from .__about__ import (
    __author__,
    __copyright__,
    __email__,
    __license__,
    __summary__,
    __title__,
    __uri__,
    __version__,
)

__all__ = [
    "__title__",
    "__summary__",
    "__uri__",
    "__version__",
    "__author__",
    "__email__",
    "__license__",
    "__copyright__",
]
Binary file not shown.
@@ -0,0 +1,31 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import sys


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# flake8: noqa

if PY3:
    string_types = (str,)
else:
    string_types = (basestring,)


def with_metaclass(meta, *bases):
    """
    Create a base class with a metaclass.
    """
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)

    return type.__new__(metaclass, "temporary_class", (), {})
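A minimal sketch (not part of the commit) of how with_metaclass is consumed; this mirrors the pattern BaseSpecifier in specifiers.py below uses with abc.ABCMeta.

# Declaring a metaclass portably across Python 2 and 3 class syntax.
import abc

class Base(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def run(self):
        """Subclasses must implement run()."""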
@@ -0,0 +1,68 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function


class Infinity(object):
    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __neg__(self):
        return NegativeInfinity


Infinity = Infinity()


class NegativeInfinity(object):
    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return True

    def __le__(self, other):
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False

    def __neg__(self):
        return Infinity


NegativeInfinity = NegativeInfinity()
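A brief illustration (not part of the commit), assuming the two singletons above are in scope; they compare greater or less than everything, which makes them useful as extreme sort keys when ordering version components.

assert Infinity > 999999 and -Infinity < 0
# sorted([3, Infinity, NegativeInfinity, 7]) -> [-Infinity, 3, 7, Infinity]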
@@ -0,0 +1,296 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import operator
import os
import platform
import sys

from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from setuptools.extern.pyparsing import Literal as L  # noqa

from ._compat import string_types
from .specifiers import Specifier, InvalidSpecifier


__all__ = [
    "InvalidMarker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "Marker",
    "default_environment",
]


class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """


class Node(object):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))

    def serialize(self):
        raise NotImplementedError


class Variable(Node):
    def serialize(self):
        return str(self)


class Value(Node):
    def serialize(self):
        return '"{0}"'.format(self)


class Op(Node):
    def serialize(self):
        return str(self)


VARIABLE = (
    L("implementation_version")
    | L("platform_python_implementation")
    | L("implementation_name")
    | L("python_full_version")
    | L("platform_release")
    | L("platform_version")
    | L("platform_machine")
    | L("platform_system")
    | L("python_version")
    | L("sys_platform")
    | L("os_name")
    | L("os.name")
    | L("sys.platform")  # PEP-345
    | L("platform.version")  # PEP-345
    | L("platform.machine")  # PEP-345
    | L("platform.python_implementation")  # PEP-345
    | L("python_implementation")  # PEP-345
    | L("extra")  # undocumented setuptools legacy
)
ALIASES = {
    "os.name": "os_name",
    "sys.platform": "sys_platform",
    "platform.version": "platform_version",
    "platform.machine": "platform_machine",
    "platform.python_implementation": "platform_python_implementation",
    "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

VERSION_CMP = (
    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

MARKER = stringStart + MARKER_EXPR + stringEnd


def _coerce_parse_result(results):
    if isinstance(results, ParseResults):
        return [_coerce_parse_result(i) for i in results]
    else:
        return results


def _format_marker(marker, first=True):
    assert isinstance(marker, (list, tuple, string_types))

    # Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself its own list. In that case we want to
    # skip the rest of this function so that we don't get extraneous () on
    # the outside.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        inner = (_format_marker(m, first=False) for m in marker)
        if first:
            return " ".join(inner)
        else:
            return "(" + " ".join(inner) + ")"
    elif isinstance(marker, tuple):
        return " ".join([m.serialize() for m in marker])
    else:
        return marker


_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}


def _eval_op(lhs, op, rhs):
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    oper = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )

    return oper(lhs, rhs)


_undefined = object()


def _get_env(environment, name):
    value = environment.get(name, _undefined)

    if value is _undefined:
        raise UndefinedEnvironmentName(
            "{0!r} does not exist in evaluation environment.".format(name)
        )

    return value


def _evaluate_markers(markers, environment):
    groups = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))

        if isinstance(marker, list):
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)

            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)


def format_full_version(info):
    version = "{0.major}.{0.minor}.{0.micro}".format(info)
    kind = info.releaselevel
    if kind != "final":
        version += kind[0] + str(info.serial)
    return version


def default_environment():
    if hasattr(sys, "implementation"):
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        iver = "0"
        implementation_name = ""

    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }


class Marker(object):
    def __init__(self, marker):
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
                marker, marker[e.loc : e.loc + 8]
            )
            raise InvalidMarker(err_str)

    def __str__(self):
        return _format_marker(self._markers)

    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
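A short sketch (not part of the commit) of Marker.evaluate in use, assuming the class above is in scope; the marker string is an arbitrary example.

# Evaluate a PEP 508 marker against the running interpreter, then against
# a partially overridden environment (per the evaluate() docstring above).
m = Marker('python_version >= "3.6" and os_name == "posix"')
print(m.evaluate())                           # depends on the current process
print(m.evaluate({"python_version": "2.7"}))  # override part of the environment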
@@ -0,0 +1,138 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import string
import re

from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from setuptools.extern.pyparsing import Literal as L  # noqa
from setuptools.extern.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

URI = Regex(r"[^ ]+")("url")
URL = AT + URI

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")


class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                'Parse error at "{0!r}": {1}'.format(
                    requirement_string[e.loc : e.loc + 8], e.msg
                )
            )

        self.name = req.name
        if req.url:
            parsed_url = urlparse.urlparse(req.url)
            if parsed_url.scheme == "file":
                if urlparse.urlunparse(parsed_url) != req.url:
                    raise InvalidRequirement("Invalid URL given")
            elif not (parsed_url.scheme and parsed_url.netloc) or (
                not parsed_url.scheme and not parsed_url.netloc
            ):
                raise InvalidRequirement("Invalid URL: {0}".format(req.url))
            self.url = req.url
        else:
            self.url = None
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))
            if self.marker:
                parts.append(" ")

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
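A short sketch (not part of the commit) of Requirement in use, assuming the class above is in scope; the requirement string is an arbitrary example.

# Parse a PEP 508 requirement string into its parts.
req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "2.7"')
print(req.name)       # requests
print(req.extras)     # {'security'}
print(req.specifier)  # ==2.8.*,>=2.8.1
print(req.marker)     # python_version < "2.7"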
@@ -0,0 +1,749 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import functools
|
||||
import itertools
|
||||
import re
|
||||
|
||||
from ._compat import string_types, with_metaclass
|
||||
from .version import Version, LegacyVersion, parse
|
||||
|
||||
|
||||
class InvalidSpecifier(ValueError):
|
||||
"""
|
||||
An invalid specifier was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
|
||||
@abc.abstractmethod
|
||||
def __str__(self):
|
||||
"""
|
||||
Returns the str representation of this Specifier like object. This
|
||||
should be representative of the Specifier itself.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __hash__(self):
|
||||
"""
|
||||
Returns a hash value for this Specifier like object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Returns a boolean representing whether or not the two Specifier like
|
||||
objects are equal.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __ne__(self, other):
|
||||
"""
|
||||
Returns a boolean representing whether or not the two Specifier like
|
||||
objects are not equal.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def prereleases(self):
|
||||
"""
|
||||
Returns whether or not pre-releases as a whole are allowed by this
|
||||
specifier.
|
||||
"""
|
||||
|
||||
@prereleases.setter
|
||||
def prereleases(self, value):
|
||||
"""
|
||||
Sets whether or not pre-releases as a whole are allowed by this
|
||||
specifier.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def contains(self, item, prereleases=None):
|
||||
"""
|
||||
Determines if the given item is contained within this specifier.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def filter(self, iterable, prereleases=None):
|
||||
"""
|
||||
Takes an iterable of items and filters them so that only items which
|
||||
are contained within this specifier are allowed in it.
|
||||
"""
|
||||
|
||||
|
||||
class _IndividualSpecifier(BaseSpecifier):
|
||||
|
||||
_operators = {}
|
||||
|
||||
def __init__(self, spec="", prereleases=None):
|
||||
match = self._regex.search(spec)
|
||||
if not match:
|
||||
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
|
||||
|
||||
self._spec = (match.group("operator").strip(), match.group("version").strip())
|
||||
|
||||
# Store whether or not this Specifier should accept prereleases
|
||||
self._prereleases = prereleases
|
||||
|
||||
def __repr__(self):
|
||||
pre = (
|
||||
", prereleases={0!r}".format(self.prereleases)
|
||||
if self._prereleases is not None
|
||||
else ""
|
||||
)
|
||||
|
||||
return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
|
||||
|
||||
def __str__(self):
|
||||
return "{0}{1}".format(*self._spec)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._spec)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, string_types):
|
||||
try:
|
||||
other = self.__class__(other)
|
||||
except InvalidSpecifier:
|
||||
return NotImplemented
|
||||
elif not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
|
||||
return self._spec == other._spec
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, string_types):
|
||||
try:
|
||||
other = self.__class__(other)
|
||||
except InvalidSpecifier:
|
||||
return NotImplemented
|
||||
elif not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
|
||||
return self._spec != other._spec
|
||||
|
||||
def _get_operator(self, op):
|
||||
return getattr(self, "_compare_{0}".format(self._operators[op]))
|
||||
|
||||
def _coerce_version(self, version):
|
||||
if not isinstance(version, (LegacyVersion, Version)):
|
||||
version = parse(version)
|
||||
return version
|
||||
|
||||
@property
|
||||
def operator(self):
|
||||
return self._spec[0]
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
return self._spec[1]
|
||||
|
||||
@property
|
||||
def prereleases(self):
|
||||
return self._prereleases
|
||||
|
||||
@prereleases.setter
|
||||
def prereleases(self, value):
|
||||
self._prereleases = value
|
||||
|
||||
def __contains__(self, item):
|
||||
return self.contains(item)
|
||||
|
||||
def contains(self, item, prereleases=None):
|
||||
# Determine if prereleases are to be allowed or not.
|
||||
if prereleases is None:
|
||||
prereleases = self.prereleases
|
||||
|
||||
# Normalize item to a Version or LegacyVersion, this allows us to have
|
||||
# a shortcut for ``"2.0" in Specifier(">=2")
|
||||
item = self._coerce_version(item)
|
||||
|
||||
# Determine if we should be supporting prereleases in this specifier
|
||||
# or not, if we do not support prereleases than we can short circuit
|
||||
# logic if this version is a prereleases.
|
||||
if item.is_prerelease and not prereleases:
|
||||
return False
|
||||
|
||||
# Actually do the comparison to determine if this item is contained
|
||||
# within this Specifier or not.
|
||||
return self._get_operator(self.operator)(item, self.version)
|
||||
|
||||
def filter(self, iterable, prereleases=None):
|
||||
yielded = False
|
||||
found_prereleases = []
|
||||
|
||||
kw = {"prereleases": prereleases if prereleases is not None else True}
|
||||
|
||||
# Attempt to iterate over all the values in the iterable and if any of
|
||||
# them match, yield them.
|
||||
for version in iterable:
|
||||
parsed_version = self._coerce_version(version)
|
||||
|
||||
if self.contains(parsed_version, **kw):
|
||||
# If our version is a prerelease, and we were not set to allow
|
||||
# prereleases, then we'll store it for later incase nothing
|
||||
# else matches this specifier.
|
||||
if parsed_version.is_prerelease and not (
|
||||
prereleases or self.prereleases
|
||||
):
|
||||
found_prereleases.append(version)
|
||||
# Either this is not a prerelease, or we should have been
|
||||
# accepting prereleases from the beginning.
|
||||
else:
|
||||
yielded = True
|
||||
yield version
|
||||
|
||||
# Now that we've iterated over everything, determine if we've yielded
|
||||
# any values, and if we have not and we have any prereleases stored up
|
||||
# then we will go ahead and yield the prereleases.
|
||||
if not yielded and found_prereleases:
|
||||
for version in found_prereleases:
|
||||
yield version
|
||||
|
||||
|
||||
class LegacySpecifier(_IndividualSpecifier):
|
||||
|
||||
_regex_str = r"""
|
||||
(?P<operator>(==|!=|<=|>=|<|>))
|
||||
\s*
|
||||
(?P<version>
|
||||
[^,;\s)]* # Since this is a "legacy" specifier, and the version
|
||||
# string can be just about anything, we match everything
|
||||
# except for whitespace, a semi-colon for marker support,
|
||||
# a closing paren since versions can be enclosed in
|
||||
# them, and a comma since it's a version separator.
|
||||
)
|
||||
"""
|
||||
|
||||
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
_operators = {
|
||||
"==": "equal",
|
||||
"!=": "not_equal",
|
||||
"<=": "less_than_equal",
|
||||
">=": "greater_than_equal",
|
||||
"<": "less_than",
|
||||
">": "greater_than",
|
||||
}
|
||||
|
||||
def _coerce_version(self, version):
|
||||
if not isinstance(version, LegacyVersion):
|
||||
version = LegacyVersion(str(version))
|
||||
return version
|
||||
|
||||
def _compare_equal(self, prospective, spec):
|
||||
return prospective == self._coerce_version(spec)
|
||||
|
||||
def _compare_not_equal(self, prospective, spec):
|
||||
return prospective != self._coerce_version(spec)
|
||||
|
||||
def _compare_less_than_equal(self, prospective, spec):
|
||||
return prospective <= self._coerce_version(spec)
|
||||
|
||||
def _compare_greater_than_equal(self, prospective, spec):
|
||||
return prospective >= self._coerce_version(spec)
|
||||
|
||||
    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)

    return wrapped


class Specifier(_IndividualSpecifier):

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )
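
    # A small illustrative sketch of the equivalence described above, assuming
    # PEP 440 semantics: "~=2.2" behaves like ">=2.2,==2.*".
    #
    #     >>> Specifier("~=2.2").contains("2.3")
    #     True
    #     >>> Specifier("~=2.2").contains("3.0")  # fails ==2.*
    #     False
    #     >>> Specifier("~=2.2").contains("2.1")  # fails >=2.2
    #     False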

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore the local
            # segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[: len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
        # versions of the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that the prospective version is
        # both less than the spec version *and* not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept post-release
        # versions of the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.1.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that the prospective version is
        # both greater than the spec version *and* not a post-release or local
        # version of the same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at our operator and determine if it is an inclusive operator
        # and, if it is, whether it includes an explicit prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, and if it does we
            # want to remove it before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result
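
# An illustrative sketch of the helper above (the values follow from
# _prefix_regex): a pre-release suffix is split off into its own
# pseudo-segment so that prefix matching can compare it separately.
#
#     >>> _version_split("1.0a2")
#     ['1', '0', 'a2']
#     >>> _version_split("2.1.3")
#     ['2', '1', '3']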


def _pad_version(left, right):
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]) :])
    right_split.append(right[len(right_split[0]) :])

    # Insert our padding
    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))

    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
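
# Illustrative sketch: both release segments get zero-padded to equal length,
# so that, e.g., "1.2" can be compared against "1.10.3" element-wise:
#
#     >>> _pad_version(["1", "2"], ["1", "10", "3"])
#     (['1', '2', '0'], ['1', '10', '3'])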


class SpecifierSet(BaseSpecifier):
    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
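
    # An illustrative sketch of the combined behaviour above, assuming PEP 440
    # semantics:
    #
    #     >>> s = SpecifierSet(">=1.0,!=1.3")
    #     >>> s.contains("1.5")
    #     True
    #     >>> s.contains("1.3")
    #     False
    #     >>> list(s.filter(["1.0", "1.3", "2.0a1"]))
    #     ['1.0']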
@@ -0,0 +1,404 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import

import distutils.util

try:
    from importlib.machinery import EXTENSION_SUFFIXES
except ImportError:  # pragma: no cover
    import imp

    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
    del imp
import platform
import re
import sys
import sysconfig
import warnings


INTERPRETER_SHORT_NAMES = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32


class Tag(object):

    __slots__ = ["_interpreter", "_abi", "_platform"]

    def __init__(self, interpreter, abi, platform):
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()

    @property
    def interpreter(self):
        return self._interpreter

    @property
    def abi(self):
        return self._abi

    @property
    def platform(self):
        return self._platform

    def __eq__(self, other):
        return (
            (self.platform == other.platform)
            and (self.abi == other.abi)
            and (self.interpreter == other.interpreter)
        )

    def __hash__(self):
        return hash((self._interpreter, self._abi, self._platform))

    def __str__(self):
        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)

    def __repr__(self):
        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))


def parse_tag(tag):
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)
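
# Illustrative sketch: a compressed tag expands into the cross product of its
# dot-separated components, e.g.
#
#     >>> sorted(str(t) for t in parse_tag("py2.py3-none-any"))
#     ['py2-none-any', 'py3-none-any']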


def _normalize_string(string):
    return string.replace(".", "_").replace("-", "_")


def _cpython_interpreter(py_version):
    # TODO: Is using py_version_nodot for interpreter version critical?
    return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])


def _cpython_abis(py_version):
    abis = []
    version = "{}{}".format(*py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = sysconfig.get_config_var("Py_DEBUG")
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC")
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append("cp{version}".format(version=version))
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis


def _cpython_tags(py_version, interpreter, abis, platforms):
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
        yield tag
    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
        yield tag
    # PEP 384 was first implemented in Python 3.2.
    for minor_version in range(py_version[1] - 1, 1, -1):
        for platform_ in platforms:
            interpreter = "cp{major}{minor}".format(
                major=py_version[0], minor=minor_version
            )
            yield Tag(interpreter, "abi3", platform_)


def _pypy_interpreter():
    return "pp{py_major}{pypy_major}{pypy_minor}".format(
        py_major=sys.version_info[0],
        pypy_major=sys.pypy_version_info.major,
        pypy_minor=sys.pypy_version_info.minor,
    )


def _generic_abi():
    abi = sysconfig.get_config_var("SOABI")
    if abi:
        return _normalize_string(abi)
    else:
        return "none"


def _pypy_tags(py_version, interpreter, abi, platforms):
    for tag in (Tag(interpreter, abi, platform) for platform in platforms):
        yield tag
    for tag in (Tag(interpreter, "none", platform) for platform in platforms):
        yield tag


def _generic_tags(interpreter, py_version, abi, platforms):
    for tag in (Tag(interpreter, abi, platform) for platform in platforms):
        yield tag
    if abi != "none":
        tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
        for tag in tags:
            yield tag


def _py_interpreter_range(py_version):
    """
    Yield Python versions in descending order.

    After the latest minor version, the major-only version is yielded, and
    then every earlier minor version of that major version, down to 0.
    """
    yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1])
    yield "py{major}".format(major=py_version[0])
    for minor in range(py_version[1] - 1, -1, -1):
        yield "py{major}{minor}".format(major=py_version[0], minor=minor)


def _independent_tags(interpreter, py_version, platforms):
    """
    Return the sequence of tags that are consistent across implementations.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any
    - py*-none-any
    """
    for version in _py_interpreter_range(py_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(py_version):
        yield Tag(version, "none", "any")


def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version, cpu_arch):
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    formats.append("universal")
    return formats


def _mac_platforms(version=None, arch=None):
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = tuple(map(int, version_str.split(".")[:2]))
    if arch is None:
        arch = _mac_arch(cpu_arch)
    platforms = []
    for minor_version in range(version[1], -1, -1):
        compat_version = version[0], minor_version
        binary_formats = _mac_binary_formats(compat_version, arch)
        for binary_format in binary_formats:
            platforms.append(
                "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
            )
    return platforms
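
# Illustrative sketch (the exact list depends on the macOS version and
# architecture passed in): the platforms walk backwards through compatible
# minor versions and binary formats, e.g.
#
#     >>> _mac_platforms((10, 13), "x86_64")[:3]
#     ['macosx_10_13_x86_64', 'macosx_10_13_intel', 'macosx_10_13_fat64']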


# From PEP 513.
def _is_manylinux_compatible(name, glibc_version):
    # Check for presence of _manylinux module.
    try:
        import _manylinux

        return bool(getattr(_manylinux, name + "_compatible"))
    except (ImportError, AttributeError):
        # Fall through to heuristic check below.
        pass

    return _have_compatible_glibc(*glibc_version)


def _glibc_version_string():
    # Returns glibc version string, or None if not using glibc.
    import ctypes

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    process_namespace = ctypes.CDLL(None)
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# Separated out from have_compatible_glibc for easier unit testing.
def _check_glibc_version(version_str, required_major, minimum_minor):
    # Parse string and check against requested version.
    #
    # We use a regexp instead of str.split because we want to discard any
    # random junk that might come after the minor version -- this might happen
    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
    # uses version strings like "2.20-2014.11"). See gh-3588.
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str,
            RuntimeWarning,
        )
        return False
    return (
        int(m.group("major")) == required_major
        and int(m.group("minor")) >= minimum_minor
    )
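
# Illustrative sketch: vendor suffixes after the minor version are ignored,
# exactly as the comment above describes:
#
#     >>> _check_glibc_version("2.20-2014.11", 2, 17)
#     True
#     >>> _check_glibc_version("2.5", 2, 17)
#     False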


def _have_compatible_glibc(required_major, minimum_minor):
    version_str = _glibc_version_string()
    if version_str is None:
        return False
    return _check_glibc_version(version_str, required_major, minimum_minor)


def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
    linux = _normalize_string(distutils.util.get_platform())
    if linux == "linux_x86_64" and is_32bit:
        linux = "linux_i686"
    manylinux_support = (
        ("manylinux2014", (2, 17)),  # CentOS 7 w/ glibc 2.17 (PEP 599)
        ("manylinux2010", (2, 12)),  # CentOS 6 w/ glibc 2.12 (PEP 571)
        ("manylinux1", (2, 5)),  # CentOS 5 w/ glibc 2.5 (PEP 513)
    )
    manylinux_support_iter = iter(manylinux_support)
    for name, glibc_version in manylinux_support_iter:
        if _is_manylinux_compatible(name, glibc_version):
            platforms = [linux.replace("linux", name)]
            break
    else:
        platforms = []
    # Support for a later manylinux implies support for an earlier version.
    platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
    platforms.append(linux)
    return platforms


def _generic_platforms():
    platform = _normalize_string(distutils.util.get_platform())
    return [platform]


def _interpreter_name():
    name = platform.python_implementation().lower()
    return INTERPRETER_SHORT_NAMES.get(name) or name


def _generic_interpreter(name, py_version):
    version = sysconfig.get_config_var("py_version_nodot")
    if not version:
        version = "".join(map(str, py_version[:2]))
    return "{name}{version}".format(name=name, version=version)


def sys_tags():
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    py_version = sys.version_info[:2]
    interpreter_name = _interpreter_name()
    if platform.system() == "Darwin":
        platforms = _mac_platforms()
    elif platform.system() == "Linux":
        platforms = _linux_platforms()
    else:
        platforms = _generic_platforms()

    if interpreter_name == "cp":
        interpreter = _cpython_interpreter(py_version)
        abis = _cpython_abis(py_version)
        for tag in _cpython_tags(py_version, interpreter, abis, platforms):
            yield tag
    elif interpreter_name == "pp":
        interpreter = _pypy_interpreter()
        abi = _generic_abi()
        for tag in _pypy_tags(py_version, interpreter, abi, platforms):
            yield tag
    else:
        interpreter = _generic_interpreter(interpreter_name, py_version)
        abi = _generic_abi()
        for tag in _generic_tags(interpreter, py_version, abi, platforms):
            yield tag
    for tag in _independent_tags(interpreter, py_version, platforms):
        yield tag
@@ -0,0 +1,57 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import re

from .version import InvalidVersion, Version


_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    # This is taken from PEP 503.
    return _canonicalize_regex.sub("-", name).lower()


def canonicalize_version(version):
    """
    This is very similar to Version.__str__, but has one subtle difference
    in the way it handles the release segment.
    """

    try:
        version = Version(version)
    except InvalidVersion:
        # Legacy versions cannot be normalized
        return version

    parts = []

    # Epoch
    if version.epoch != 0:
        parts.append("{0}!".format(version.epoch))

    # Release segment
    # NB: This strips trailing '.0's to normalize
    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))

    # Pre-release
    if version.pre is not None:
        parts.append("".join(str(x) for x in version.pre))

    # Post-release
    if version.post is not None:
        parts.append(".post{0}".format(version.post))

    # Development release
    if version.dev is not None:
        parts.append(".dev{0}".format(version.dev))

    # Local version segment
    if version.local is not None:
        parts.append("+{0}".format(version.local))

    return "".join(parts)
@@ -0,0 +1,420 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]


_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)
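
# Illustrative sketch: anything that is not a valid PEP 440 version falls
# back to the legacy parser instead of raising:
#
#     >>> parse("1.0.post1")
#     <Version('1.0.post1')>
#     >>> parse("french toast")
#     <LegacyVersion('french toast')>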


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):
    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):
    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def epoch(self):
        return -1

    @property
    def release(self):
        return None

    @property
    def pre(self):
        return None

    @property
    def post(self):
        return None

    @property
    def dev(self):
        return None

    @property
    def local(self):
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False

    @property
    def is_devrelease(self):
        return False


_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)

_legacy_version_replacement_map = {
    "pre": "c",
    "preview": "c",
    "-": "final-",
    "rc": "c",
    "dev": "@",
}


def _parse_version_parts(s):
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
    # epoch greater than or equal to 0. This will effectively put the
    # LegacyVersion, which uses the de facto standard originally implemented
    # by setuptools, before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from setuptools' pkg_resources.parse_version, prior
    # to its adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
    """


class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        return self._version.epoch

    @property
    def release(self):
        return self._version.release

    @property
    def pre(self):
        return self._version.pre

    @property
    def post(self):
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        return self.post is not None

    @property
    def is_devrelease(self):
        return self.dev is not None


def _parse_letter_version(letter, number):
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)

    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)
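
# Illustrative sketches of the normalization above:
#
#     >>> _parse_letter_version("alpha", None)  # implicit 0
#     ('a', 0)
#     >>> _parse_letter_version("rev", "5")     # alternate spelling of post
#     ('post', 5)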


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now
    # leading zeros until we come to something non-zero, re-reverse the rest
    # back into the correct order, and make it a tuple to use as our sorting
    # key.
    release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP 440.
        # - Alphanumeric segments sort before numeric segments
        # - Alphanumeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)

    return epoch, release, pre, post, dev, local
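
# Illustrative sketch of the ordering these keys produce under PEP 440:
#
#     >>> Version("1.0.dev0") < Version("1.0a0") < Version("1.0") < Version("1.0.post1")
#     True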
5742
venv/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py
Normal file
File diff suppressed because it is too large
868
venv/lib/python3.8/site-packages/setuptools/_vendor/six.py
Normal file
@@ -0,0 +1,868 @@
"""Utilities for writing code that runs on Python 2 and 3"""

# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import absolute_import

import functools
import itertools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X(object):

            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X


def _add_doc(func, doc):
    """Add documentation to a function."""
    func.__doc__ = doc


def _import_module(name):
    """Import module, returning the module after the last dot."""
    __import__(name)
    return sys.modules[name]


class _LazyDescr(object):

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result


class MovedModule(_LazyDescr):

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value


class _LazyModule(types.ModuleType):

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []


class MovedAttribute(_LazyDescr):

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)


class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code

_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    __path__ = []  # mark as package


_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")


class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']


_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")


def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))


if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)


if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")


if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)


if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value
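
# Usage sketch (caller side, illustrative only): re-raise a caught error
# as a different type while preserving the cause/traceback, with syntax
# that parses on both Python 2 and 3.
#
#     from setuptools.extern import six
#     try:
#         {}["missing"]
#     except KeyError as exc:
#         six.raise_from(RuntimeError("lookup failed"), exc)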


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper
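
# Usage sketch (illustrative only; `Meta` is a made-up metaclass, not part
# of six): both helpers hand class creation to the real metaclass exactly
# once, so the temporary dummy class never survives.
#
#     class Meta(type):
#         def __new__(mcls, name, bases, ns):
#             ns.setdefault("tagged", True)
#             return super(Meta, mcls).__new__(mcls, name, bases, ns)
#
#     class Base(with_metaclass(Meta, object)):
#         pass
#
#     @add_metaclass(Meta)
#     class Decorated(object):
#         pass
#
#     assert Base.tagged and Decorated.tagged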


def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
173
venv/lib/python3.8/site-packages/setuptools/archive_util.py
Normal file
@@ -0,0 +1,173 @@
"""Utilities for extracting common archive formats"""

import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from distutils.errors import DistutilsError

from pkg_resources import ensure_directory

__all__ = [
    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]


class UnrecognizedFormat(DistutilsError):
    """Couldn't recognize the archive type"""


def default_filter(src, dst):
    """The default progress/filter callback; returns True for all files"""
    return dst


def unpack_archive(filename, extract_dir, progress_filter=default_filter,
                   drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

    `progress_filter` is a function taking two arguments: a source path
    internal to the archive ('/'-separated), and a filesystem path where it
    will be extracted. The callback must return the desired extract path
    (which may be the same as the one passed in), or else ``None`` to skip
    that file or directory. The callback can thus be used to report on the
    progress of the extraction, as well as to filter the items extracted or
    alter their extraction paths.

    `drivers`, if supplied, must be a non-empty sequence of functions with the
    same signature as this function (minus the `drivers` argument), that raise
    ``UnrecognizedFormat`` if they do not support extracting the designated
    archive type. The `drivers` are tried in sequence until one is found that
    does not raise an error, or until all are exhausted (in which case
    ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
    drivers, the module's ``extraction_drivers`` constant will be used, which
    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
    order.
    """
    for driver in drivers or extraction_drivers:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            continue
        else:
            return
    else:
        raise UnrecognizedFormat(
            "Not a recognized archive type: %s" % filename
        )
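
# Usage sketch (caller side, illustrative only; "dist.zip" and "build/"
# are made-up paths): a progress_filter can skip entries by returning
# None and report progress for the rest.
#
#     def log_and_filter(src, dst):
#         if src.startswith('EGG-INFO/'):
#             return None      # skip this entry
#         print("extracting", src)
#         return dst           # extract to the proposed path
#
#     unpack_archive("dist.zip", "build/", progress_filter=log_and_filter)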


def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    paths = {
        filename: ('', extract_dir),
    }
    for base, dirs, files in os.walk(filename):
        src, dst = paths[base]
        for d in dirs:
            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
        for f in files:
            target = os.path.join(dst, f)
            target = progress_filter(src + f, target)
            if not target:
                # skip non-files
                continue
            ensure_directory(target)
            f = os.path.join(base, f)
            shutil.copyfile(f, target)
            shutil.copystat(f, target)


def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    with zipfile.ZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name.split('/'):
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            target = progress_filter(name, target)
            if not target:
                continue
            if name.endswith('/'):
                # directory
                ensure_directory(target)
            else:
                # file
                ensure_directory(target)
                data = z.read(info.filename)
                with open(target, 'wb') as f:
                    f.write(data)
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)


def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        )
    with contextlib.closing(tarobj):
        # don't do any chowning!
        tarobj.chown = lambda *args: None
        for member in tarobj:
            name = member.name
            # don't extract absolute paths or ones with .. in them
            if not name.startswith('/') and '..' not in name.split('/'):
                prelim_dst = os.path.join(extract_dir, *name.split('/'))

                # resolve any links and extract the link targets as normal
                # files
                while member is not None and (member.islnk() or member.issym()):
                    linkpath = member.linkname
                    if member.issym():
                        base = posixpath.dirname(member.name)
                        linkpath = posixpath.join(base, linkpath)
                        linkpath = posixpath.normpath(linkpath)
                    member = tarobj._getmember(linkpath)

                if member is not None and (member.isfile() or member.isdir()):
                    final_dst = progress_filter(name, prelim_dst)
                    if final_dst:
                        if final_dst.endswith(os.sep):
                            final_dst = final_dst[:-1]
                        try:
                            # XXX Ugh
                            tarobj._extract_member(member, final_dst)
                        except tarfile.ExtractError:
                            # chown/chmod/mkfifo/mknode/makedev failed
                            pass
        return True


extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
257
venv/lib/python3.8/site-packages/setuptools/build_meta.py
Normal file
@@ -0,0 +1,257 @@
"""A PEP 517 interface to setuptools

Previously, when a user or a command line tool (let's call it a "frontend")
needed to make a request of setuptools to take a certain action, for
example, generating a list of installation requirements, the frontend
would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.

PEP 517 defines a different method of interfacing with setuptools. Rather
than calling "setup.py" directly, the frontend should:

  1. Set the current directory to the directory with a setup.py file
  2. Import this module into a safe python interpreter (one in which
     setuptools can potentially set global variables or crash hard).
  3. Call one of the functions defined in PEP 517.

What each function does is defined in PEP 517. However, here is a "casual"
definition of the functions (this definition should not be relied on for
bug reports or API stability):

  - `build_wheel`: build a wheel in the folder and return the basename
  - `get_requires_for_build_wheel`: get the `setup_requires` to build
  - `prepare_metadata_for_build_wheel`: get the `install_requires`
  - `build_sdist`: build an sdist in the folder and return the basename
  - `get_requires_for_build_sdist`: get the `setup_requires` to build

Again, this is not a formal definition! Just a "taste" of the module.
"""

import io
import os
import sys
import tokenize
import shutil
import contextlib

import setuptools
import distutils
from setuptools.py31compat import TemporaryDirectory

from pkg_resources import parse_requirements
from pkg_resources.py31compat import makedirs

__all__ = ['get_requires_for_build_sdist',
           'get_requires_for_build_wheel',
           'prepare_metadata_for_build_wheel',
           'build_wheel',
           'build_sdist',
           '__legacy__',
           'SetupRequirementsError']


class SetupRequirementsError(BaseException):
    def __init__(self, specifiers):
        self.specifiers = specifiers


class Distribution(setuptools.dist.Distribution):
    def fetch_build_eggs(self, specifiers):
        specifier_list = list(map(str, parse_requirements(specifiers)))

        raise SetupRequirementsError(specifier_list)

    @classmethod
    @contextlib.contextmanager
    def patch(cls):
        """
        Replace
        distutils.dist.Distribution with this class
        for the duration of this context.
        """
        orig = distutils.core.Distribution
        distutils.core.Distribution = cls
        try:
            yield
        finally:
            distutils.core.Distribution = orig


def _to_str(s):
    """
    Convert a filename to a string (on Python 2, explicitly
    a byte string, not Unicode) as distutils checks for the
    exact type str.
    """
    if sys.version_info[0] == 2 and not isinstance(s, str):
        # Assume it's Unicode, as that's what the PEP says
        # should be provided.
        return s.encode(sys.getfilesystemencoding())
    return s


def _get_immediate_subdirectories(a_dir):
    return [name for name in os.listdir(a_dir)
            if os.path.isdir(os.path.join(a_dir, name))]


def _file_with_extension(directory, extension):
    matching = (
        f for f in os.listdir(directory)
        if f.endswith(extension)
    )
    file, = matching
    return file


def _open_setup_script(setup_script):
    if not os.path.exists(setup_script):
        # Supply a default setup.py
        return io.StringIO(u"from setuptools import setup; setup()")

    return getattr(tokenize, 'open', open)(setup_script)


class _BuildMetaBackend(object):

    def _fix_config(self, config_settings):
        config_settings = config_settings or {}
        config_settings.setdefault('--global-option', [])
        return config_settings

    def _get_build_requires(self, config_settings, requirements):
        config_settings = self._fix_config(config_settings)

        sys.argv = sys.argv[:1] + ['egg_info'] + \
            config_settings["--global-option"]
        try:
            with Distribution.patch():
                self.run_setup()
        except SetupRequirementsError as e:
            requirements += e.specifiers

        return requirements

    def run_setup(self, setup_script='setup.py'):
        # Note that we can reuse our build directory between calls
        # Correctness comes first, then optimization later
        __file__ = setup_script
        __name__ = '__main__'

        with _open_setup_script(__file__) as f:
            code = f.read().replace(r'\r\n', r'\n')

        exec(compile(code, __file__, 'exec'), locals())

    def get_requires_for_build_wheel(self, config_settings=None):
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(config_settings, requirements=['wheel'])

    def get_requires_for_build_sdist(self, config_settings=None):
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(config_settings, requirements=[])

    def prepare_metadata_for_build_wheel(self, metadata_directory,
                                         config_settings=None):
        sys.argv = sys.argv[:1] + ['dist_info', '--egg-base',
                                   _to_str(metadata_directory)]
        self.run_setup()

        dist_info_directory = metadata_directory
        while True:
            dist_infos = [f for f in os.listdir(dist_info_directory)
                          if f.endswith('.dist-info')]

            if (len(dist_infos) == 0 and
                    len(_get_immediate_subdirectories(dist_info_directory)) == 1):

                dist_info_directory = os.path.join(
                    dist_info_directory, os.listdir(dist_info_directory)[0])
                continue

            assert len(dist_infos) == 1
            break

        # PEP 517 requires that the .dist-info directory be placed in the
        # metadata_directory. To comply, we MUST copy the directory to the root
        if dist_info_directory != metadata_directory:
            shutil.move(
                os.path.join(dist_info_directory, dist_infos[0]),
                metadata_directory)
            shutil.rmtree(dist_info_directory, ignore_errors=True)

        return dist_infos[0]

    def _build_with_temp_dir(self, setup_command, result_extension,
                             result_directory, config_settings):
        config_settings = self._fix_config(config_settings)
        result_directory = os.path.abspath(result_directory)

        # Build in a temporary directory, then copy to the target.
        makedirs(result_directory, exist_ok=True)
        with TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
            sys.argv = (sys.argv[:1] + setup_command +
                        ['--dist-dir', tmp_dist_dir] +
                        config_settings["--global-option"])
            self.run_setup()

            result_basename = _file_with_extension(tmp_dist_dir, result_extension)
            result_path = os.path.join(result_directory, result_basename)
            if os.path.exists(result_path):
                # os.rename will fail overwriting on non-Unix.
                os.remove(result_path)
            os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)

        return result_basename

    def build_wheel(self, wheel_directory, config_settings=None,
                    metadata_directory=None):
        return self._build_with_temp_dir(['bdist_wheel'], '.whl',
                                         wheel_directory, config_settings)

    def build_sdist(self, sdist_directory, config_settings=None):
        return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
                                         '.tar.gz', sdist_directory,
                                         config_settings)


class _BuildMetaLegacyBackend(_BuildMetaBackend):
    """Compatibility backend for setuptools

    This is a version of setuptools.build_meta that endeavors to maintain backwards
    compatibility with pre-PEP 517 modes of invocation. It exists as a temporary
    bridge between the old packaging mechanism and the new packaging mechanism,
    and will eventually be removed.
    """
    def run_setup(self, setup_script='setup.py'):
        # In order to maintain compatibility with scripts assuming that
        # the setup.py script is in a directory on the PYTHONPATH, inject
        # '' into sys.path. (pypa/setuptools#1642)
        sys_path = list(sys.path)  # Save the original path

        script_dir = os.path.dirname(os.path.abspath(setup_script))
        if script_dir not in sys.path:
            sys.path.insert(0, script_dir)

        try:
            super(_BuildMetaLegacyBackend,
                  self).run_setup(setup_script=setup_script)
        finally:
            # While PEP 517 frontends should be calling each hook in a fresh
            # subprocess according to the standard (and thus it should not be
            # strictly necessary to restore the old sys.path), we'll restore
            # the original path so that the path manipulation does not persist
            # within the hook after run_setup is called.
            sys.path[:] = sys_path


# The primary backend
_BACKEND = _BuildMetaBackend()

get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
build_wheel = _BACKEND.build_wheel
build_sdist = _BACKEND.build_sdist


# The legacy backend
__legacy__ = _BuildMetaLegacyBackend()
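A hedged sketch of how a frontend might drive this backend in-process (real frontends such as pip invoke the hooks in a fresh subprocess; the directory names are illustrative only):

import os
from setuptools import build_meta

os.chdir("path/to/project")                        # directory with setup.py
print(build_meta.get_requires_for_build_wheel())   # e.g. ['wheel']
print(build_meta.build_wheel("dist"))              # prints the wheel basename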
BIN
venv/lib/python3.8/site-packages/setuptools/cli-32.exe
Normal file
Binary file not shown.
BIN
venv/lib/python3.8/site-packages/setuptools/cli-64.exe
Normal file
Binary file not shown.
BIN
venv/lib/python3.8/site-packages/setuptools/cli.exe
Normal file
Binary file not shown.
@@ -0,0 +1,17 @@
__all__ = [
    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
    'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
    'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info',
]

from distutils.command.bdist import bdist
import sys

from setuptools.command import install_scripts

if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

del bdist, sys
80
venv/lib/python3.8/site-packages/setuptools/command/alias.py
Normal file
@@ -0,0 +1,80 @@
from distutils.errors import DistutilsOptionError

from setuptools.extern.six.moves import map

from setuptools.command.setopt import edit_config, option_base, config_file


def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    for c in '"', "'", "\\", "#":
        if c in arg:
            return repr(arg)
    if arg.split() != [arg]:
        return repr(arg)
    return arg
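
# Behavior sketch (illustrative only): arguments containing quote
# characters or whitespace come back repr()-quoted, so shlex.split()
# later recovers each as a single token.
#
#     import shlex
#     assert shquote("plain") == "plain"
#     quoted = shquote("two words")              # "'two words'"
#     assert shlex.split(quoted) == ["two words"]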


class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            alias, = self.args
            if self.remove:
                command = None
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)


def format_alias(name, aliases):
    source, command = aliases[name]
    if source == config_file('global'):
        source = '--global-config '
    elif source == config_file('user'):
        source = '--user-config '
    elif source == config_file('local'):
        source = ''
    else:
        source = '--filename=%r' % source
    return source + name + ' ' + command
502
venv/lib/python3.8/site-packages/setuptools/command/bdist_egg.py
Normal file
@@ -0,0 +1,502 @@
"""setuptools.command.bdist_egg

Build .egg distributions"""

from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import re
import textwrap
import marshal

from setuptools.extern import six

from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.extension import Library
from setuptools import Command

try:
    # Python 2.7 or >=3.2
    from sysconfig import get_path, get_python_version

    def _get_purelib():
        return get_path("purelib")
except ImportError:
    from distutils.sysconfig import get_python_lib, get_python_version

    def _get_purelib():
        return get_python_lib(False)


def strip_module(filename):
    if '.' in filename:
        filename = os.path.splitext(filename)[0]
    if filename.endswith('module'):
        filename = filename[:-6]
    return filename


def sorted_walk(dir):
    """Do os.walk in a reproducible way,
    independent of indeterministic filesystem readdir order
    """
    for base, dirs, files in os.walk(dir):
        dirs.sort()
        files.sort()
        yield base, dirs, files


def write_stub(resource, pyfile):
    _stub_template = textwrap.dedent("""
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, imp
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            imp.load_dynamic(__name__,__file__)
        __bootstrap__()
        """).lstrip()
    with open(pyfile, 'w') as f:
        f.write(_stub_template % resource)


class bdist_egg(Command):
    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p', "platform name to embed in generated filenames "
         "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = [
        'keep-temp', 'skip-build', 'exclude-source-files'
    ]

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.egg_output is None:

            # Compute filename of the output egg
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name
            ).egg_name()

            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        old, self.distribution.data_files = self.distribution.data_files, []

        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                            site_packages + os.sep
                    ):
                        item = realpath[len(site_packages) + 1:], item[1]
                        # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            self.distribution.data_files = old

    def get_outputs(self):
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
                                  '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir,
                              no_ep=1)

        self.copy_metadata_to(egg_info)
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        write_safety_flag(
            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
        )

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run, mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))

    def zap_pyfiles(self):
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)

                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)

                if base.endswith('__pycache__'):
                    path_old = path

                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    path_new = os.path.join(
                        base, os.pardir, m.group('name') + '.pyc')
                    log.info(
                        "Renaming file from [%s] to [%s]"
                        % (path_old, path_new))
                    try:
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new)

    def zip_safe(self):
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self):
        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
        ep = epm.get('setuptools.installation', {}).get('eggsecutable')
        if ep is None:
            return 'w'  # not an eggsecutable, do it the usual way.

        if not ep.attrs or ep.extras:
            raise DistutilsSetupError(
                "eggsecutable entry point (%r) cannot have 'extras' "
                "or refer to a module" % (ep,)
            )

        pyver = '{}.{}'.format(*sys.version_info)
        pkg = ep.module_name
        full = '.'.join(ep.attrs)
        base = ep.attrs[0]
        basename = os.path.basename(self.egg_output)

        header = (
            "#!/bin/sh\n"
            'if [ `basename $0` = "%(basename)s" ]\n'
            'then exec python%(pyver)s -c "'
            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
            '" "$@"\n'
            'else\n'
            '  echo $0 is not the correct name for this egg file.\n'
            '  echo Please rename it back to %(basename)s and try again.\n'
            '  exec false\n'
            'fi\n'
        ) % locals()

        if not self.dry_run:
            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
            f = open(self.egg_output, 'w')
            f.write(header)
            f.close()
        return 'a'

    def copy_metadata_to(self, target_dir):
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        paths = {self.bdist_dir: ''}
        for base, dirs, files in sorted_walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs


NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())


def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = sorted_walk(egg_dir)
    base, dirs, files = next(walker)
    if 'EGG-INFO' in dirs:
        dirs.remove('EGG-INFO')
    yield base, dirs, files
    for bdf in walker:
        yield bdf


def analyze_egg(egg_dir, stubs):
    # check for existing flag in EGG-INFO
    for flag, fn in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
            return flag
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith('.py') or name.endswith('.pyw'):
                continue
            elif name.endswith('.pyc') or name.endswith('.pyo'):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe


def write_safety_flag(egg_dir, safe):
    # Write or remove zip safety flag file(s)
    for flag, fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe) != flag:
                os.unlink(fn)
        elif safe is not None and bool(safe) == flag:
            f = open(fn, 'wt')
            f.write('\n')
            f.close()


safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}


def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if six.PY2:
        skip = 8  # skip magic & date
    elif sys.version_info < (3, 7):
        skip = 12  # skip magic & date & file size
    else:
        skip = 16  # skip magic & reserved? & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        # Note: a comma was missing after 'getfile' in the original list,
        # silently concatenating it with 'getsourcelines'; fixed here.
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
|
||||
|
||||
def iter_symbols(code):
|
||||
"""Yield names and strings used by `code` and its nested code objects"""
|
||||
for name in code.co_names:
|
||||
yield name
|
||||
for const in code.co_consts:
|
||||
if isinstance(const, six.string_types):
|
||||
yield const
|
||||
elif isinstance(const, CodeType):
|
||||
for name in iter_symbols(const):
|
||||
yield name
|
||||
|
||||
|
||||
def can_scan():
|
||||
if not sys.platform.startswith('java') and sys.platform != 'cli':
|
||||
# CPython, PyPy, etc.
|
||||
return True
|
||||
log.warn("Unable to analyze compiled code on this platform.")
|
||||
log.warn("Please ask the author to include a 'zip_safe'"
|
||||
" setting (either True or False) in the package's setup.py")
|
||||
|
||||
|
||||
# Attribute names of options for commands that might need to be convinced to
|
||||
# install to the egg build directory
|
||||
|
||||
INSTALL_DIRECTORY_ATTRS = [
|
||||
'install_lib', 'install_dir', 'install_data', 'install_base'
|
||||
]
|
||||
|
||||
|
||||
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
|
||||
mode='w'):
|
||||
"""Create a zip file from all the files under 'base_dir'. The output
|
||||
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
|
||||
Python module (if available) or the InfoZIP "zip" utility (if installed
|
||||
and found on the default search path). If neither tool is available,
|
||||
raises DistutilsExecError. Returns the name of the output zip file.
|
||||
"""
|
||||
import zipfile
|
||||
|
||||
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
|
||||
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
|
||||
|
||||
def visit(z, dirname, names):
|
||||
for name in names:
|
||||
path = os.path.normpath(os.path.join(dirname, name))
|
||||
if os.path.isfile(path):
|
||||
p = path[len(base_dir) + 1:]
|
||||
if not dry_run:
|
||||
z.write(path, p)
|
||||
log.debug("adding '%s'", p)
|
||||
|
||||
compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
|
||||
if not dry_run:
|
||||
z = zipfile.ZipFile(zip_filename, mode, compression=compression)
|
||||
for dirname, dirs, files in sorted_walk(base_dir):
|
||||
visit(z, dirname, files)
|
||||
z.close()
|
||||
else:
|
||||
for dirname, dirs, files in sorted_walk(base_dir):
|
||||
visit(None, dirname, files)
|
||||
return zip_filename
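
# Usage sketch (added commentary; the path names here are hypothetical):
#
#     make_zipfile('dist/example-1.0.egg', 'build/bdist.linux/egg')
#
# walks build/bdist.linux/egg, stores each file under its path relative to
# that directory, and returns 'dist/example-1.0.egg'.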


@@ -0,0 +1,43 @@
import distutils.command.bdist_rpm as orig


class bdist_rpm(orig.bdist_rpm):
    """
    Override the default bdist_rpm behavior to do the following:

    1. Run egg_info to ensure the name and version are properly calculated.
    2. Always run 'install' using --single-version-externally-managed to
       disable eggs in RPM distributions.
    3. Replace dash with underscore in the version numbers for better RPM
       compatibility.
    """

    def run(self):
        # ensure distro name is up-to-date
        self.run_command('egg_info')

        orig.bdist_rpm.run(self)

    def _make_spec_file(self):
        version = self.distribution.get_version()
        rpmversion = version.replace('-', '_')
        spec = orig.bdist_rpm._make_spec_file(self)
        line23 = '%define version ' + version
        line24 = '%define version ' + rpmversion
        spec = [
            line.replace(
                "Source0: %{name}-%{version}.tar",
                "Source0: %{name}-%{unmangled_version}.tar"
            ).replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed "
            ).replace(
                "%setup",
                "%setup -n %{name}-%{unmangled_version}"
            ).replace(line23, line24)
            for line in spec
        ]
        insert_loc = spec.index(line24) + 1
        unmangled_version = "%define unmangled_version " + version
        spec.insert(insert_loc, unmangled_version)
        return spec
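
# Worked example (added commentary, not part of the original file): for
# version '1.0-2', _make_spec_file rewrites '%define version 1.0-2' to
# '%define version 1.0_2' and inserts '%define unmangled_version 1.0-2'
# on the next line, so Source0 and %setup keep the original archive name.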


@@ -0,0 +1,21 @@
import distutils.command.bdist_wininst as orig


class bdist_wininst(orig.bdist_wininst):
    def reinitialize_command(self, command, reinit_subcommands=0):
        """
        Supplement reinitialize_command to work around
        http://bugs.python.org/issue20819
        """
        cmd = self.distribution.reinitialize_command(
            command, reinit_subcommands)
        if command in ('install', 'install_lib'):
            cmd.install_lib = None
        return cmd

    def run(self):
        self._is_running = True
        try:
            orig.bdist_wininst.run(self)
        finally:
            self._is_running = False


@@ -0,0 +1,98 @@
import distutils.command.build_clib as orig
from distutils.errors import DistutilsSetupError
from distutils import log
from setuptools.dep_util import newer_pairwise_group


class build_clib(orig.build_clib):
    """
    Override the default build_clib behaviour to do the following:

    1. Implement a rudimentary timestamp-based dependency system
       so 'compile()' doesn't run every time.
    2. Add more keys to the 'build_info' dictionary:
        * obj_deps - specify dependencies for each object compiled.
                     this should be a dictionary mapping a key
                     with the source filename to a list of
                     dependencies. Use an empty string for global
                     dependencies.
        * cflags   - specify a list of additional flags to pass to
                     the compiler.
    """

    def build_libraries(self, libraries):
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name)
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # Make sure everything is the correct type.
            # obj_deps should be a dictionary of keys as sources
            # and a list/tuple of files that are its dependencies.
            obj_deps = build_info.get('obj_deps', dict())
            if not isinstance(obj_deps, dict):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)
            dependencies = []

            # Get the global dependencies that are specified by the '' key.
            # These will go into every source's dependency list.
            global_deps = obj_deps.get('', list())
            if not isinstance(global_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)

            # Build the list to be used by newer_pairwise_group;
            # each source will be auto-added to its dependencies.
            for source in sources:
                src_deps = [source]
                src_deps.extend(global_deps)
                extra_deps = obj_deps.get(source, list())
                if not isinstance(extra_deps, (list, tuple)):
                    raise DistutilsSetupError(
                        "in 'libraries' option (library '%s'), "
                        "'obj_deps' must be a dictionary of "
                        "type 'source: list'" % lib_name)
                src_deps.extend(extra_deps)
                dependencies.append(src_deps)

            expected_objects = self.compiler.object_filenames(
                sources,
                output_dir=self.build_temp
            )

            if newer_pairwise_group(dependencies, expected_objects) != ([], []):
                # First, compile the source code to object files in the library
                # directory.  (This should probably change to putting object
                # files in a temporary build directory.)
                macros = build_info.get('macros')
                include_dirs = build_info.get('include_dirs')
                cflags = build_info.get('cflags')
                objects = self.compiler.compile(
                    sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    extra_postargs=cflags,
                    debug=self.debug
                )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(
                expected_objects,
                lib_name,
                output_dir=self.build_clib,
                debug=self.debug
            )

327
venv/lib/python3.8/site-packages/setuptools/command/build_ext.py
Normal file
@@ -0,0 +1,327 @@
import os
import sys
import itertools
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log

from setuptools.extension import Library
from setuptools.extern import six

if six.PY2:
    import imp

    EXTENSION_SUFFIXES = [
        s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
else:
    from importlib.machinery import EXTENSION_SUFFIXES

try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext
    # Additionally, assert that the compiler module will load
    # also. Ref #1229.
    __import__('Cython.Compiler.Main')
except ImportError:
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS


def _customize_compiler_for_shlib(compiler):
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS['LDSHARED'] = (
                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)


have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl
        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        pass


def if_dl(s):
    return s if have_rtld else ''


def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    for suffix in EXTENSION_SUFFIXES:
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix
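
# Added note: on CPython/Unix the stable-ABI suffix is typically '.abi3.so',
# while Windows keeps the plain '.pyd' suffix; when no matching suffix is
# found, get_abi3_suffix() implicitly returns None.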


class build_ext(_build_ext):
    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def copy_extensions_to_source(self):
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,
                                         os.path.basename(filename))
            src_filename = os.path.join(self.build_lib, filename)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    def get_ext_filename(self, fullname):
        filename = _build_ext.get_ext_filename(self, fullname)
        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            use_abi3 = (
                six.PY3
                and getattr(ext, 'py_limited_api')
                and get_abi3_suffix()
            )
            if use_abi3:
                so_ext = get_config_var('EXT_SUFFIX')
                filename = filename[:-len(so_ext)]
                filename = filename + get_abi3_suffix()
            if isinstance(ext, Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn, libtype)
            elif use_stubs and ext._links_to_dynamic:
                d, fn = os.path.split(filename)
                return os.path.join(d, 'dl-' + fn)
        return filename

    def initialize_options(self):
        _build_ext.initialize_options(self)
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}

    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions
                       if isinstance(ext, Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext

            # distutils 3.1 will also ask for module names
            # XXX what to do with conflicts?
            self.ext_map[fullname.split('.')[-1]] = ext

            ltd = self.shlibs and self.links_to_dynamic(ext) or False
            ns = ltd and use_stubs and not isinstance(ext, Library)
            ext._links_to_dynamic = ltd
            ext._needs_stub = ns
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)

    def setup_shlib_compiler(self):
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        _customize_compiler_for_shlib(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name, value) tuples
            for (name, value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        if isinstance(ext, Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self, ext)

    def build_extension(self, ext):
        ext._convert_pyx_sources_to_lang()
        _compiler = self.compiler
        try:
            if isinstance(ext, Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self, ext)
            if ext._needs_stub:
                cmd = self.get_finalized_command('build_py').build_lib
                self.write_stub(cmd, ext)
        finally:
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
        return any(pkg + libname in libnames for libname in ext.libraries)

    def get_outputs(self):
        return _build_ext.get_outputs(self) + self.__get_stubs_outputs()

    def __get_stubs_outputs(self):
        # assemble the base name for each extension that needs a stub
        ns_ext_bases = (
            os.path.join(self.build_lib, *ext._full_name.split('.'))
            for ext in self.extensions
            if ext._needs_stub
        )
        # pair each base with the extension
        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
        return list(base + fnext for base, fnext in pairs)

    def __get_output_extensions(self):
        yield '.py'
        yield '.pyc'
        if self.get_finalized_command('build_py').optimize:
            yield '.pyo'

    def write_stub(self, output_dir, ext, compile=False):
        log.info("writing stub loader for %s to %s", ext._full_name,
                 output_dir)
        stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
                     '.py')
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file + " already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file, 'w')
            f.write(
                '\n'.join([
                    "def __bootstrap__():",
                    "   global __bootstrap__, __file__, __loader__",
                    "   import sys, os, pkg_resources, imp" + if_dl(", dl"),
                    "   __file__ = pkg_resources.resource_filename"
                    "(__name__,%r)"
                    % os.path.basename(ext._file_name),
                    "   del __bootstrap__",
                    "   if '__loader__' in globals():",
                    "       del __loader__",
                    if_dl("   old_flags = sys.getdlopenflags()"),
                    "   old_dir = os.getcwd()",
                    "   try:",
                    "     os.chdir(os.path.dirname(__file__))",
                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                    "     imp.load_dynamic(__name__,__file__)",
                    "   finally:",
                    if_dl("     sys.setdlopenflags(old_flags)"),
                    "     os.chdir(old_dir)",
                    "__bootstrap__()",
                    ""  # terminal \n
                ])
            )
            f.close()
        if compile:
            from distutils.util import byte_compile

            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)


if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )
270
venv/lib/python3.8/site-packages/setuptools/command/build_py.py
Normal file
@@ -0,0 +1,270 @@
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import fnmatch
import textwrap
import io
import distutils.errors
import itertools

from setuptools.extern import six
from setuptools.extern.six.moves import map, filter, filterfalse

try:
    from setuptools.lib2to3_ex import Mixin2to3
except ImportError:

    class Mixin2to3:
        def run_2to3(self, files, doctests=True):
            "do nothing"


class build_py(orig.build_py, Mixin2to3):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    def finalize_options(self):
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = (self.distribution.exclude_package_data or
                                     {})
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']
        self.__updated_files = []
        self.__doctests_2to3 = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.run_2to3(self.__updated_files, False)
        self.run_2to3(self.__updated_files, True)
        self.run_2to3(self.__doctests_2to3, True)

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        if six.PY2 and isinstance(package, six.string_types):
            # avoid errors on Python 2 when unicode is passed (#190)
            package = package.split('.')
        outfile, copied = orig.build_py.build_module(self, module, module_file,
                                                     package)
        if copied:
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(glob, patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                if (copied and
                        srcfile in self.distribution.convert_2to3_doctests):
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (
            fnmatch.filter(files, pattern)
            for pattern in patterns
        )
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (
            fn
            for fn in files
            if fn not in bad
        )
        # ditch dupes
        return list(_unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
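
    # Worked example (added commentary): with src_dir='src/mypkg' and
    # spec={'': ['*.txt'], 'mypkg': ['data/*.dat']}, the patterns yielded
    # for package 'mypkg' are 'src/mypkg/*.txt' and 'src/mypkg/data/*.dat',
    # each converted to the local path convention by convert_path().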


# from Python docs
def _unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element


def assert_relative(path):
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError

    msg = textwrap.dedent("""
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
        """).lstrip() % path
    raise DistutilsSetupError(msg)

221
venv/lib/python3.8/site-packages/setuptools/command/develop.py
Normal file
@@ -0,0 +1,221 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os
import glob
import io

from setuptools.extern import six

import pkg_resources
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools

__metaclass__ = type


class develop(namespaces.DevelopInstaller, easy_install):
    """Set up package for development"""

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
            self.uninstall_namespaces()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            template = "Please rename %r to %r before using 'develop'"
            args = ei.egg_info, ei.broken_egg_info
            raise DistutilsError(template % args)
        self.args = [ei.egg_name]

        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        egg_link_fn = ei.egg_name + '.egg-link'
        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        target = pkg_resources.normalize_path(self.egg_base)
        egg_path = pkg_resources.normalize_path(
            os.path.join(self.install_dir, self.egg_path))
        if egg_path != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )

        # Make a distribution for the package's source
        self.dist = pkg_resources.Distribution(
            target,
            pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name
        )

        self.setup_path = self._resolve_setup_path(
            self.egg_base,
            self.install_dir,
            self.egg_path,
        )

    @staticmethod
    def _resolve_setup_path(egg_base, install_dir, egg_path):
        """
        Generate a path from egg_base back to '.' where the
        setup script resides and ensure that path points to the
        setup path from $install_dir/$egg_path.
        """
        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
        if path_to_setup != os.curdir:
            path_to_setup = '../' * (path_to_setup.count('/') + 1)
        resolved = pkg_resources.normalize_path(
            os.path.join(install_dir, egg_path, path_to_setup)
        )
        if resolved != pkg_resources.normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory", resolved,
                pkg_resources.normalize_path(os.curdir))
        return path_to_setup
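
    # Worked example (added commentary): for egg_base='src', path_to_setup
    # becomes '../'; for egg_base='a/b' it becomes '../../'. The resolved
    # join of install_dir, egg_path and that relative path must land back
    # on the current directory, where the setup script lives.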

    def install_for_development(self):
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            # If we run 2to3 we can not do this inplace:

            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = pkg_resources.normalize_path(bpy_cmd.build_lib)

            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')

            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')

            # Fixup egg-link and easy-install.pth
            ei_cmd = self.get_finalized_command("egg_info")
            self.egg_path = build_path
            self.dist.location = build_path
            # XXX
            self.dist._provider = pkg_resources.PathMetadata(
                build_path, ei_cmd.egg_info)
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')

            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')

        self.install_site_py()  # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            egg_link_file = open(self.egg_link)
            contents = [line.rstrip() for line in egg_link_file]
            egg_link_file.close()
            if contents not in ([self.egg_path],
                                [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            with io.open(script_path) as strm:
                script_text = strm.read()
            self.install_script(dist, script_name, script_text, script_path)

    def install_wrapper_scripts(self, dist):
        dist = VersionlessRequirement(dist)
        return easy_install.install_wrapper_scripts(self, dist)


class VersionlessRequirement:
    """
    Adapt a pkg_resources.Distribution to simply return the project
    name as the 'requirement' so that scripts will work across
    multiple versions.

    >>> from pkg_resources import Distribution
    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        self.__dist = dist

    def __getattr__(self, name):
        return getattr(self.__dist, name)

    def as_requirement(self):
        return self.project_name
Some files were not shown because too many files have changed in this diff