Initial commit
This commit is contained in:
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,33 @@
|
||||
from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
|
||||
|
||||
|
||||
def infer_namespace(node, context=None):
|
||||
callsite = arguments.CallSite.from_call(node, context=context)
|
||||
if not callsite.keyword_arguments:
|
||||
# Cannot make sense of it.
|
||||
raise UseInferenceDefault()
|
||||
|
||||
class_node = nodes.ClassDef("Namespace", "docstring")
|
||||
class_node.parent = node.parent
|
||||
for attr in set(callsite.keyword_arguments):
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
fake_node.attrname = attr
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return iter((class_node.instantiate_class(),))
|
||||
|
||||
|
||||
def _looks_like_namespace(node):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return (
|
||||
func.attrname == "Namespace"
|
||||
and isinstance(func.expr, nodes.Name)
|
||||
and func.expr.name == "argparse"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
|
||||
)
|
||||
@@ -0,0 +1,65 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
"""
|
||||
Astroid hook for the attrs library
|
||||
|
||||
Without this hook pylint reports unsupported-assignment-operation
|
||||
for attrs classes
|
||||
"""
|
||||
|
||||
import astroid
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib"))
|
||||
ATTRS_NAMES = frozenset(("attr.s", "attrs", "attr.attrs", "attr.attributes"))
|
||||
|
||||
|
||||
def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
|
||||
"""Return True if a decorated node has
|
||||
an attr decorator applied."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator_attribute in node.decorators.nodes:
|
||||
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
|
||||
decorator_attribute = decorator_attribute.func
|
||||
if decorator_attribute.as_string() in decorator_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def attr_attributes_transform(node):
|
||||
"""Given that the ClassNode has an attr decorator,
|
||||
rewrite class attributes as instance attributes
|
||||
"""
|
||||
# Astroid can't infer this attribute properly
|
||||
# Prevents https://github.com/PyCQA/pylint/issues/1884
|
||||
node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
|
||||
|
||||
for cdefbodynode in node.body:
|
||||
if not isinstance(cdefbodynode, (astroid.Assign, astroid.AnnAssign)):
|
||||
continue
|
||||
if isinstance(cdefbodynode.value, astroid.Call):
|
||||
if cdefbodynode.value.func.as_string() not in ATTRIB_NAMES:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
targets = (
|
||||
cdefbodynode.targets
|
||||
if hasattr(cdefbodynode, "targets")
|
||||
else [cdefbodynode.target]
|
||||
)
|
||||
for target in targets:
|
||||
|
||||
rhs_node = astroid.Unknown(
|
||||
lineno=cdefbodynode.lineno,
|
||||
col_offset=cdefbodynode.col_offset,
|
||||
parent=cdefbodynode,
|
||||
)
|
||||
node.locals[target.name] = [rhs_node]
|
||||
node.instance_attrs[target.name] = [rhs_node]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
|
||||
)
|
||||
@@ -0,0 +1,28 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for understanding boto3.ServiceRequest()"""
|
||||
import astroid
|
||||
from astroid import MANAGER, extract_node
|
||||
|
||||
BOTO_SERVICE_FACTORY_QUALIFIED_NAME = "boto3.resources.base.ServiceResource"
|
||||
|
||||
|
||||
def service_request_transform(node):
|
||||
"""Transform ServiceResource to look like dynamic classes"""
|
||||
code = """
|
||||
def __getattr__(self, attr):
|
||||
return 0
|
||||
"""
|
||||
func_getattr = extract_node(code)
|
||||
node.locals["__getattr__"] = [func_getattr]
|
||||
return node
|
||||
|
||||
|
||||
def _looks_like_boto3_service_request(node):
|
||||
return node.qname() == BOTO_SERVICE_FACTORY_QUALIFIED_NAME
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, service_request_transform, _looks_like_boto3_service_request
|
||||
)
|
||||
@@ -0,0 +1,873 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Stanislav Levin <slev@altlinux.org>
|
||||
# Copyright (c) 2019 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2019 Bryce Guinta <bryce.guinta@protonmail.com>
|
||||
# Copyright (c) 2019 Frédéric Chapoton <fchapoton2@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for various builtins."""
|
||||
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
import six
|
||||
from astroid import (
|
||||
MANAGER,
|
||||
UseInferenceDefault,
|
||||
AttributeInferenceError,
|
||||
inference_tip,
|
||||
InferenceError,
|
||||
NameInferenceError,
|
||||
AstroidTypeError,
|
||||
MroError,
|
||||
)
|
||||
from astroid import arguments
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import helpers
|
||||
from astroid import nodes
|
||||
from astroid import objects
|
||||
from astroid import scoped_nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
OBJECT_DUNDER_NEW = "object.__new__"
|
||||
|
||||
|
||||
def _extend_str(class_node, rvalue):
|
||||
"""function to extend builtin str/unicode class"""
|
||||
code = dedent(
|
||||
"""
|
||||
class whatever(object):
|
||||
def join(self, iterable):
|
||||
return {rvalue}
|
||||
def replace(self, old, new, count=None):
|
||||
return {rvalue}
|
||||
def format(self, *args, **kwargs):
|
||||
return {rvalue}
|
||||
def encode(self, encoding='ascii', errors=None):
|
||||
return ''
|
||||
def decode(self, encoding='ascii', errors=None):
|
||||
return u''
|
||||
def capitalize(self):
|
||||
return {rvalue}
|
||||
def title(self):
|
||||
return {rvalue}
|
||||
def lower(self):
|
||||
return {rvalue}
|
||||
def upper(self):
|
||||
return {rvalue}
|
||||
def swapcase(self):
|
||||
return {rvalue}
|
||||
def index(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def find(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def count(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def strip(self, chars=None):
|
||||
return {rvalue}
|
||||
def lstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rjust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def center(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def ljust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
"""
|
||||
)
|
||||
code = code.format(rvalue=rvalue)
|
||||
fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
|
||||
for method in fake.mymethods():
|
||||
method.parent = class_node
|
||||
method.lineno = None
|
||||
method.col_offset = None
|
||||
if "__class__" in method.locals:
|
||||
method.locals["__class__"] = [class_node]
|
||||
class_node.locals[method.name] = [method]
|
||||
method.parent = class_node
|
||||
|
||||
|
||||
def _extend_builtins(class_transforms):
|
||||
builtin_ast = MANAGER.builtins_module
|
||||
for class_name, transform in class_transforms.items():
|
||||
transform(builtin_ast[class_name])
|
||||
|
||||
|
||||
_extend_builtins(
|
||||
{
|
||||
"bytes": partial(_extend_str, rvalue="b''"),
|
||||
"str": partial(_extend_str, rvalue="''"),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _builtin_filter_predicate(node, builtin_name):
|
||||
if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
|
||||
return True
|
||||
if isinstance(node.func, nodes.Attribute):
|
||||
return (
|
||||
node.func.attrname == "fromkeys"
|
||||
and isinstance(node.func.expr, nodes.Name)
|
||||
and node.func.expr.name == "dict"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
|
||||
"""Register a new transform function for the given *builtin_name*.
|
||||
|
||||
The transform function must accept two parameters, a node and
|
||||
an optional context.
|
||||
"""
|
||||
|
||||
def _transform_wrapper(node, context=None):
|
||||
result = transform(node, context=context)
|
||||
if result:
|
||||
if not result.parent:
|
||||
# Let the transformation function determine
|
||||
# the parent for its result. Otherwise,
|
||||
# we set it to be the node we transformed from.
|
||||
result.parent = node
|
||||
|
||||
if result.lineno is None:
|
||||
result.lineno = node.lineno
|
||||
if result.col_offset is None:
|
||||
result.col_offset = node.col_offset
|
||||
return iter([result])
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call,
|
||||
inference_tip(_transform_wrapper),
|
||||
partial(_builtin_filter_predicate, builtin_name=builtin_name),
|
||||
)
|
||||
|
||||
|
||||
def _container_generic_inference(node, context, node_type, transform):
|
||||
args = node.args
|
||||
if not args:
|
||||
return node_type()
|
||||
if len(node.args) > 1:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
(arg,) = args
|
||||
transformed = transform(arg)
|
||||
if not transformed:
|
||||
try:
|
||||
inferred = next(arg.infer(context=context))
|
||||
except (InferenceError, StopIteration):
|
||||
raise UseInferenceDefault()
|
||||
if inferred is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
transformed = transform(inferred)
|
||||
if not transformed or transformed is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
return transformed
|
||||
|
||||
|
||||
def _container_generic_transform(arg, context, klass, iterables, build_elts):
|
||||
if isinstance(arg, klass):
|
||||
return arg
|
||||
elif isinstance(arg, iterables):
|
||||
if all(isinstance(elt, nodes.Const) for elt in arg.elts):
|
||||
elts = [elt.value for elt in arg.elts]
|
||||
else:
|
||||
# TODO: Does not handle deduplication for sets.
|
||||
elts = []
|
||||
for element in arg.elts:
|
||||
inferred = helpers.safe_infer(element, context=context)
|
||||
if inferred:
|
||||
evaluated_object = nodes.EvaluatedObject(
|
||||
original=element, value=inferred
|
||||
)
|
||||
elts.append(evaluated_object)
|
||||
elif isinstance(arg, nodes.Dict):
|
||||
# Dicts need to have consts as strings already.
|
||||
if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
|
||||
raise UseInferenceDefault()
|
||||
elts = [item[0].value for item in arg.items]
|
||||
elif isinstance(arg, nodes.Const) and isinstance(
|
||||
arg.value, (six.string_types, six.binary_type)
|
||||
):
|
||||
elts = arg.value
|
||||
else:
|
||||
return
|
||||
return klass.from_elements(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin_container(
|
||||
node, context, klass=None, iterables=None, build_elts=None
|
||||
):
|
||||
transform_func = partial(
|
||||
_container_generic_transform,
|
||||
context=context,
|
||||
klass=klass,
|
||||
iterables=iterables,
|
||||
build_elts=build_elts,
|
||||
)
|
||||
|
||||
return _container_generic_inference(node, context, klass, transform_func)
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
infer_tuple = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.Tuple,
|
||||
iterables=(
|
||||
nodes.List,
|
||||
nodes.Set,
|
||||
objects.FrozenSet,
|
||||
objects.DictItems,
|
||||
objects.DictKeys,
|
||||
objects.DictValues,
|
||||
),
|
||||
build_elts=tuple,
|
||||
)
|
||||
|
||||
infer_list = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.List,
|
||||
iterables=(
|
||||
nodes.Tuple,
|
||||
nodes.Set,
|
||||
objects.FrozenSet,
|
||||
objects.DictItems,
|
||||
objects.DictKeys,
|
||||
objects.DictValues,
|
||||
),
|
||||
build_elts=list,
|
||||
)
|
||||
|
||||
infer_set = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.Set,
|
||||
iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
|
||||
build_elts=set,
|
||||
)
|
||||
|
||||
infer_frozenset = partial(
|
||||
_infer_builtin_container,
|
||||
klass=objects.FrozenSet,
|
||||
iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
|
||||
build_elts=frozenset,
|
||||
)
|
||||
|
||||
|
||||
def _get_elts(arg, context):
|
||||
is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
|
||||
try:
|
||||
inferred = next(arg.infer(context))
|
||||
except (InferenceError, NameInferenceError):
|
||||
raise UseInferenceDefault()
|
||||
if isinstance(inferred, nodes.Dict):
|
||||
items = inferred.items
|
||||
elif is_iterable(inferred):
|
||||
items = []
|
||||
for elt in inferred.elts:
|
||||
# If an item is not a pair of two items,
|
||||
# then fallback to the default inference.
|
||||
# Also, take in consideration only hashable items,
|
||||
# tuples and consts. We are choosing Names as well.
|
||||
if not is_iterable(elt):
|
||||
raise UseInferenceDefault()
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault()
|
||||
if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
|
||||
raise UseInferenceDefault()
|
||||
items.append(tuple(elt.elts))
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
return items
|
||||
|
||||
|
||||
def infer_dict(node, context=None):
|
||||
"""Try to infer a dict call to a Dict node.
|
||||
|
||||
The function treats the following cases:
|
||||
|
||||
* dict()
|
||||
* dict(mapping)
|
||||
* dict(iterable)
|
||||
* dict(iterable, **kwargs)
|
||||
* dict(mapping, **kwargs)
|
||||
* dict(**kwargs)
|
||||
|
||||
If a case can't be inferred, we'll fallback to default inference.
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.has_invalid_arguments() or call.has_invalid_keywords():
|
||||
raise UseInferenceDefault
|
||||
|
||||
args = call.positional_arguments
|
||||
kwargs = list(call.keyword_arguments.items())
|
||||
|
||||
if not args and not kwargs:
|
||||
# dict()
|
||||
return nodes.Dict()
|
||||
elif kwargs and not args:
|
||||
# dict(a=1, b=2, c=4)
|
||||
items = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
elif len(args) == 1 and kwargs:
|
||||
# dict(some_iterable, b=2, c=4)
|
||||
elts = _get_elts(args[0], context)
|
||||
keys = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
items = elts + keys
|
||||
elif len(args) == 1:
|
||||
items = _get_elts(args[0], context)
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
value = nodes.Dict(
|
||||
col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
|
||||
)
|
||||
value.postinit(items)
|
||||
return value
|
||||
|
||||
|
||||
def infer_super(node, context=None):
|
||||
"""Understand super calls.
|
||||
|
||||
There are some restrictions for what can be understood:
|
||||
|
||||
* unbounded super (one argument form) is not understood.
|
||||
|
||||
* if the super call is not inside a function (classmethod or method),
|
||||
then the default inference will be used.
|
||||
|
||||
* if the super arguments can't be inferred, the default inference
|
||||
will be used.
|
||||
"""
|
||||
if len(node.args) == 1:
|
||||
# Ignore unbounded super.
|
||||
raise UseInferenceDefault
|
||||
|
||||
scope = node.scope()
|
||||
if not isinstance(scope, nodes.FunctionDef):
|
||||
# Ignore non-method uses of super.
|
||||
raise UseInferenceDefault
|
||||
if scope.type not in ("classmethod", "method"):
|
||||
# Not interested in staticmethods.
|
||||
raise UseInferenceDefault
|
||||
|
||||
cls = scoped_nodes.get_wrapping_class(scope)
|
||||
if not len(node.args):
|
||||
mro_pointer = cls
|
||||
# In we are in a classmethod, the interpreter will fill
|
||||
# automatically the class as the second argument, not an instance.
|
||||
if scope.type == "classmethod":
|
||||
mro_type = cls
|
||||
else:
|
||||
mro_type = cls.instantiate_class()
|
||||
else:
|
||||
try:
|
||||
mro_pointer = next(node.args[0].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
mro_type = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
|
||||
# No way we could understand this.
|
||||
raise UseInferenceDefault
|
||||
|
||||
super_obj = objects.Super(
|
||||
mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
|
||||
)
|
||||
super_obj.parent = node
|
||||
return super_obj
|
||||
|
||||
|
||||
def _infer_getattr_args(node, context):
|
||||
if len(node.args) not in (2, 3):
|
||||
# Not a valid getattr call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
try:
|
||||
obj = next(node.args[0].infer(context=context))
|
||||
attr = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if obj is util.Uninferable or attr is util.Uninferable:
|
||||
# If one of the arguments is something we can't infer,
|
||||
# then also make the result of the getattr call something
|
||||
# which is unknown.
|
||||
return util.Uninferable, util.Uninferable
|
||||
|
||||
is_string = isinstance(attr, nodes.Const) and isinstance(
|
||||
attr.value, six.string_types
|
||||
)
|
||||
if not is_string:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return obj, attr.value
|
||||
|
||||
|
||||
def infer_getattr(node, context=None):
|
||||
"""Understand getattr calls
|
||||
|
||||
If one of the arguments is an Uninferable object, then the
|
||||
result will be an Uninferable object. Otherwise, the normal attribute
|
||||
lookup will be done.
|
||||
"""
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if (
|
||||
obj is util.Uninferable
|
||||
or attr is util.Uninferable
|
||||
or not hasattr(obj, "igetattr")
|
||||
):
|
||||
return util.Uninferable
|
||||
|
||||
try:
|
||||
return next(obj.igetattr(attr, context=context))
|
||||
except (StopIteration, InferenceError, AttributeInferenceError):
|
||||
if len(node.args) == 3:
|
||||
# Try to infer the default and return it instead.
|
||||
try:
|
||||
return next(node.args[2].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
raise UseInferenceDefault
|
||||
|
||||
|
||||
def infer_hasattr(node, context=None):
|
||||
"""Understand hasattr calls
|
||||
|
||||
This always guarantees three possible outcomes for calling
|
||||
hasattr: Const(False) when we are sure that the object
|
||||
doesn't have the intended attribute, Const(True) when
|
||||
we know that the object has the attribute and Uninferable
|
||||
when we are unsure of the outcome of the function call.
|
||||
"""
|
||||
try:
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if (
|
||||
obj is util.Uninferable
|
||||
or attr is util.Uninferable
|
||||
or not hasattr(obj, "getattr")
|
||||
):
|
||||
return util.Uninferable
|
||||
obj.getattr(attr, context=context)
|
||||
except UseInferenceDefault:
|
||||
# Can't infer something from this function call.
|
||||
return util.Uninferable
|
||||
except AttributeInferenceError:
|
||||
# Doesn't have it.
|
||||
return nodes.Const(False)
|
||||
return nodes.Const(True)
|
||||
|
||||
|
||||
def infer_callable(node, context=None):
|
||||
"""Understand callable calls
|
||||
|
||||
This follows Python's semantics, where an object
|
||||
is callable if it provides an attribute __call__,
|
||||
even though that attribute is something which can't be
|
||||
called.
|
||||
"""
|
||||
if len(node.args) != 1:
|
||||
# Invalid callable call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(inferred.callable())
|
||||
|
||||
|
||||
def infer_property(node, context=None):
|
||||
"""Understand `property` class
|
||||
|
||||
This only infers the output of `property`
|
||||
call, not the arguments themselves.
|
||||
"""
|
||||
if len(node.args) < 1:
|
||||
# Invalid property call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
getter = node.args[0]
|
||||
try:
|
||||
inferred = next(getter.infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not isinstance(inferred, (nodes.FunctionDef, nodes.Lambda)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
return objects.Property(
|
||||
function=inferred,
|
||||
name=inferred.name,
|
||||
doc=getattr(inferred, "doc", None),
|
||||
lineno=node.lineno,
|
||||
parent=node,
|
||||
col_offset=node.col_offset,
|
||||
)
|
||||
|
||||
|
||||
def infer_bool(node, context=None):
|
||||
"""Understand bool calls."""
|
||||
if len(node.args) > 1:
|
||||
# Invalid bool call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not node.args:
|
||||
return nodes.Const(False)
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
|
||||
bool_value = inferred.bool_value(context=context)
|
||||
if bool_value is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(bool_value)
|
||||
|
||||
|
||||
def infer_type(node, context=None):
|
||||
"""Understand the one-argument form of *type*."""
|
||||
if len(node.args) != 1:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return helpers.object_type(node.args[0], context)
|
||||
|
||||
|
||||
def infer_slice(node, context=None):
|
||||
"""Understand `slice` calls."""
|
||||
args = node.args
|
||||
if not 0 < len(args) <= 3:
|
||||
raise UseInferenceDefault
|
||||
|
||||
infer_func = partial(helpers.safe_infer, context=context)
|
||||
args = [infer_func(arg) for arg in args]
|
||||
for arg in args:
|
||||
if not arg or arg is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg, nodes.Const):
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg.value, (type(None), int)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(args) < 3:
|
||||
# Make sure we have 3 arguments.
|
||||
args.extend([None] * (3 - len(args)))
|
||||
|
||||
slice_node = nodes.Slice(
|
||||
lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
|
||||
)
|
||||
slice_node.postinit(*args)
|
||||
return slice_node
|
||||
|
||||
|
||||
def _infer_object__new__decorator(node, context=None):
|
||||
# Instantiate class immediately
|
||||
# since that's what @object.__new__ does
|
||||
return iter((node.instantiate_class(),))
|
||||
|
||||
|
||||
def _infer_object__new__decorator_check(node):
|
||||
"""Predicate before inference_tip
|
||||
|
||||
Check if the given ClassDef has an @object.__new__ decorator
|
||||
"""
|
||||
if not node.decorators:
|
||||
return False
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if isinstance(decorator, nodes.Attribute):
|
||||
if decorator.as_string() == OBJECT_DUNDER_NEW:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def infer_issubclass(callnode, context=None):
|
||||
"""Infer issubclass() calls
|
||||
|
||||
:param nodes.Call callnode: an `issubclass` call
|
||||
:param InferenceContext: the context for the inference
|
||||
:rtype nodes.Const: Boolean Const value of the `issubclass` call
|
||||
:raises UseInferenceDefault: If the node cannot be inferred
|
||||
"""
|
||||
call = arguments.CallSite.from_call(callnode, context=context)
|
||||
if call.keyword_arguments:
|
||||
# issubclass doesn't support keyword arguments
|
||||
raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
|
||||
if len(call.positional_arguments) != 2:
|
||||
raise UseInferenceDefault(
|
||||
"Expected two arguments, got {count}".format(
|
||||
count=len(call.positional_arguments)
|
||||
)
|
||||
)
|
||||
# The left hand argument is the obj to be checked
|
||||
obj_node, class_or_tuple_node = call.positional_arguments
|
||||
|
||||
try:
|
||||
obj_type = next(obj_node.infer(context=context))
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
if not isinstance(obj_type, nodes.ClassDef):
|
||||
raise UseInferenceDefault("TypeError: arg 1 must be class")
|
||||
|
||||
# The right hand argument is the class(es) that the given
|
||||
# object is to be checked against.
|
||||
try:
|
||||
class_container = _class_or_tuple_to_container(
|
||||
class_or_tuple_node, context=context
|
||||
)
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
try:
|
||||
issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
|
||||
except AstroidTypeError as exc:
|
||||
raise UseInferenceDefault("TypeError: " + str(exc)) from exc
|
||||
except MroError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
return nodes.Const(issubclass_bool)
|
||||
|
||||
|
||||
def infer_isinstance(callnode, context=None):
|
||||
"""Infer isinstance calls
|
||||
|
||||
:param nodes.Call callnode: an isinstance call
|
||||
:param InferenceContext: context for call
|
||||
(currently unused but is a common interface for inference)
|
||||
:rtype nodes.Const: Boolean Const value of isinstance call
|
||||
|
||||
:raises UseInferenceDefault: If the node cannot be inferred
|
||||
"""
|
||||
call = arguments.CallSite.from_call(callnode, context=context)
|
||||
if call.keyword_arguments:
|
||||
# isinstance doesn't support keyword arguments
|
||||
raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
|
||||
if len(call.positional_arguments) != 2:
|
||||
raise UseInferenceDefault(
|
||||
"Expected two arguments, got {count}".format(
|
||||
count=len(call.positional_arguments)
|
||||
)
|
||||
)
|
||||
# The left hand argument is the obj to be checked
|
||||
obj_node, class_or_tuple_node = call.positional_arguments
|
||||
# The right hand argument is the class(es) that the given
|
||||
# obj is to be check is an instance of
|
||||
try:
|
||||
class_container = _class_or_tuple_to_container(
|
||||
class_or_tuple_node, context=context
|
||||
)
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
|
||||
except AstroidTypeError as exc:
|
||||
raise UseInferenceDefault("TypeError: " + str(exc))
|
||||
except MroError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
if isinstance_bool is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
return nodes.Const(isinstance_bool)
|
||||
|
||||
|
||||
def _class_or_tuple_to_container(node, context=None):
|
||||
# Move inferences results into container
|
||||
# to simplify later logic
|
||||
# raises InferenceError if any of the inferences fall through
|
||||
node_infer = next(node.infer(context=context))
|
||||
# arg2 MUST be a type or a TUPLE of types
|
||||
# for isinstance
|
||||
if isinstance(node_infer, nodes.Tuple):
|
||||
class_container = [
|
||||
next(node.infer(context=context)) for node in node_infer.elts
|
||||
]
|
||||
class_container = [
|
||||
klass_node for klass_node in class_container if klass_node is not None
|
||||
]
|
||||
else:
|
||||
class_container = [node_infer]
|
||||
return class_container
|
||||
|
||||
|
||||
def infer_len(node, context=None):
|
||||
"""Infer length calls
|
||||
|
||||
:param nodes.Call node: len call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const node with the inferred length, if possible
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
|
||||
if len(call.positional_arguments) != 1:
|
||||
raise UseInferenceDefault(
|
||||
"TypeError: len() must take exactly one argument "
|
||||
"({len}) given".format(len=len(call.positional_arguments))
|
||||
)
|
||||
[argument_node] = call.positional_arguments
|
||||
try:
|
||||
return nodes.Const(helpers.object_len(argument_node, context=context))
|
||||
except (AstroidTypeError, InferenceError) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
|
||||
def infer_str(node, context=None):
|
||||
"""Infer str() calls
|
||||
|
||||
:param nodes.Call node: str() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const containing an empty string
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
|
||||
try:
|
||||
return nodes.Const("")
|
||||
except (AstroidTypeError, InferenceError) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
|
||||
def infer_int(node, context=None):
|
||||
"""Infer int() calls
|
||||
|
||||
:param nodes.Call node: int() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const containing the integer value of the int() call
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
|
||||
|
||||
if call.positional_arguments:
|
||||
try:
|
||||
first_value = next(call.positional_arguments[0].infer(context=context))
|
||||
except (InferenceError, StopIteration) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
if first_value is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if isinstance(first_value, nodes.Const) and isinstance(
|
||||
first_value.value, (int, str)
|
||||
):
|
||||
try:
|
||||
actual_value = int(first_value.value)
|
||||
except ValueError:
|
||||
return nodes.Const(0)
|
||||
return nodes.Const(actual_value)
|
||||
|
||||
return nodes.Const(0)
|
||||
|
||||
|
||||
def infer_dict_fromkeys(node, context=None):
|
||||
"""Infer dict.fromkeys
|
||||
|
||||
:param nodes.Call node: dict.fromkeys() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Dict:
|
||||
a Dictionary containing the values that astroid was able to infer.
|
||||
In case the inference failed for any reason, an empty dictionary
|
||||
will be inferred instead.
|
||||
"""
|
||||
|
||||
def _build_dict_with_elements(elements):
|
||||
new_node = nodes.Dict(
|
||||
col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
|
||||
)
|
||||
new_node.postinit(elements)
|
||||
return new_node
|
||||
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
|
||||
if len(call.positional_arguments) not in {1, 2}:
|
||||
raise UseInferenceDefault(
|
||||
"TypeError: Needs between 1 and 2 positional arguments"
|
||||
)
|
||||
|
||||
default = nodes.Const(None)
|
||||
values = call.positional_arguments[0]
|
||||
try:
|
||||
inferred_values = next(values.infer(context=context))
|
||||
except InferenceError:
|
||||
return _build_dict_with_elements([])
|
||||
if inferred_values is util.Uninferable:
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
# Limit to a couple of potential values, as this can become pretty complicated
|
||||
accepted_iterable_elements = (nodes.Const,)
|
||||
if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
|
||||
elements = inferred_values.elts
|
||||
for element in elements:
|
||||
if not isinstance(element, accepted_iterable_elements):
|
||||
# Fallback to an empty dict
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
elements_with_value = [(element, default) for element in elements]
|
||||
return _build_dict_with_elements(elements_with_value)
|
||||
|
||||
elif isinstance(inferred_values, nodes.Const) and isinstance(
|
||||
inferred_values.value, (str, bytes)
|
||||
):
|
||||
elements = [
|
||||
(nodes.Const(element), default) for element in inferred_values.value
|
||||
]
|
||||
return _build_dict_with_elements(elements)
|
||||
elif isinstance(inferred_values, nodes.Dict):
|
||||
keys = inferred_values.itered()
|
||||
for key in keys:
|
||||
if not isinstance(key, accepted_iterable_elements):
|
||||
# Fallback to an empty dict
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
elements_with_value = [(element, default) for element in keys]
|
||||
return _build_dict_with_elements(elements_with_value)
|
||||
|
||||
# Fallback to an empty dictionary
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
|
||||
# Builtins inference
|
||||
register_builtin_transform(infer_bool, "bool")
|
||||
register_builtin_transform(infer_super, "super")
|
||||
register_builtin_transform(infer_callable, "callable")
|
||||
register_builtin_transform(infer_property, "property")
|
||||
register_builtin_transform(infer_getattr, "getattr")
|
||||
register_builtin_transform(infer_hasattr, "hasattr")
|
||||
register_builtin_transform(infer_tuple, "tuple")
|
||||
register_builtin_transform(infer_set, "set")
|
||||
register_builtin_transform(infer_list, "list")
|
||||
register_builtin_transform(infer_dict, "dict")
|
||||
register_builtin_transform(infer_frozenset, "frozenset")
|
||||
register_builtin_transform(infer_type, "type")
|
||||
register_builtin_transform(infer_slice, "slice")
|
||||
register_builtin_transform(infer_isinstance, "isinstance")
|
||||
register_builtin_transform(infer_issubclass, "issubclass")
|
||||
register_builtin_transform(infer_len, "len")
|
||||
register_builtin_transform(infer_str, "str")
|
||||
register_builtin_transform(infer_int, "int")
|
||||
register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
|
||||
|
||||
|
||||
# Infer object.__new__ calls
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
inference_tip(_infer_object__new__decorator),
|
||||
_infer_object__new__decorator_check,
|
||||
)
|
||||
@@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _collections_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
class defaultdict(dict):
|
||||
default_factory = None
|
||||
def __missing__(self, key): pass
|
||||
def __getitem__(self, key): return default_factory
|
||||
|
||||
"""
|
||||
+ _deque_mock()
|
||||
+ _ordered_dict_mock()
|
||||
)
|
||||
|
||||
|
||||
def _deque_mock():
|
||||
base_deque_class = """
|
||||
class deque(object):
|
||||
maxlen = 0
|
||||
def __init__(self, iterable=None, maxlen=None):
|
||||
self.iterable = iterable or []
|
||||
def append(self, x): pass
|
||||
def appendleft(self, x): pass
|
||||
def clear(self): pass
|
||||
def count(self, x): return 0
|
||||
def extend(self, iterable): pass
|
||||
def extendleft(self, iterable): pass
|
||||
def pop(self): return self.iterable[0]
|
||||
def popleft(self): return self.iterable[0]
|
||||
def remove(self, value): pass
|
||||
def reverse(self): return reversed(self.iterable)
|
||||
def rotate(self, n=1): return self
|
||||
def __iter__(self): return self
|
||||
def __reversed__(self): return self.iterable[::-1]
|
||||
def __getitem__(self, index): return self.iterable[index]
|
||||
def __setitem__(self, index, value): pass
|
||||
def __delitem__(self, index): pass
|
||||
def __bool__(self): return bool(self.iterable)
|
||||
def __nonzero__(self): return bool(self.iterable)
|
||||
def __contains__(self, o): return o in self.iterable
|
||||
def __len__(self): return len(self.iterable)
|
||||
def __copy__(self): return deque(self.iterable)
|
||||
def copy(self): return deque(self.iterable)
|
||||
def index(self, x, start=0, end=0): return 0
|
||||
def insert(self, x, i): pass
|
||||
def __add__(self, other): pass
|
||||
def __iadd__(self, other): pass
|
||||
def __mul__(self, other): pass
|
||||
def __imul__(self, other): pass
|
||||
def __rmul__(self, other): pass"""
|
||||
return base_deque_class
|
||||
|
||||
|
||||
def _ordered_dict_mock():
|
||||
base_ordered_dict_class = """
|
||||
class OrderedDict(dict):
|
||||
def __reversed__(self): return self[::-1]
|
||||
def move_to_end(self, key, last=False): pass"""
|
||||
return base_ordered_dict_class
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)
|
||||
@@ -0,0 +1,26 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
import astroid
|
||||
|
||||
PY37 = sys.version_info >= (3, 7)
|
||||
|
||||
if PY37:
|
||||
# Since Python 3.7 Hashing Methods are added
|
||||
# dynamically to globals()
|
||||
|
||||
def _re_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
_Method = namedtuple('_Method', 'name ident salt_chars total_size')
|
||||
|
||||
METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
|
||||
METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
|
||||
METHOD_BLOWFISH = _Method('BLOWFISH', 2, 'b', 22)
|
||||
METHOD_MD5 = _Method('MD5', '1', 8, 34)
|
||||
METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
|
||||
"""
|
||||
)
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "crypt", _re_transform)
|
||||
179
venv/lib/python3.8/site-packages/astroid/brain/brain_curses.py
Normal file
179
venv/lib/python3.8/site-packages/astroid/brain/brain_curses.py
Normal file
@@ -0,0 +1,179 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import astroid
|
||||
|
||||
|
||||
def _curses_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
A_ALTCHARSET = 1
|
||||
A_BLINK = 1
|
||||
A_BOLD = 1
|
||||
A_DIM = 1
|
||||
A_INVIS = 1
|
||||
A_ITALIC = 1
|
||||
A_NORMAL = 1
|
||||
A_PROTECT = 1
|
||||
A_REVERSE = 1
|
||||
A_STANDOUT = 1
|
||||
A_UNDERLINE = 1
|
||||
A_HORIZONTAL = 1
|
||||
A_LEFT = 1
|
||||
A_LOW = 1
|
||||
A_RIGHT = 1
|
||||
A_TOP = 1
|
||||
A_VERTICAL = 1
|
||||
A_CHARTEXT = 1
|
||||
A_ATTRIBUTES = 1
|
||||
A_CHARTEXT = 1
|
||||
A_COLOR = 1
|
||||
KEY_MIN = 1
|
||||
KEY_BREAK = 1
|
||||
KEY_DOWN = 1
|
||||
KEY_UP = 1
|
||||
KEY_LEFT = 1
|
||||
KEY_RIGHT = 1
|
||||
KEY_HOME = 1
|
||||
KEY_BACKSPACE = 1
|
||||
KEY_F0 = 1
|
||||
KEY_Fn = 1
|
||||
KEY_DL = 1
|
||||
KEY_IL = 1
|
||||
KEY_DC = 1
|
||||
KEY_IC = 1
|
||||
KEY_EIC = 1
|
||||
KEY_CLEAR = 1
|
||||
KEY_EOS = 1
|
||||
KEY_EOL = 1
|
||||
KEY_SF = 1
|
||||
KEY_SR = 1
|
||||
KEY_NPAGE = 1
|
||||
KEY_PPAGE = 1
|
||||
KEY_STAB = 1
|
||||
KEY_CTAB = 1
|
||||
KEY_CATAB = 1
|
||||
KEY_ENTER = 1
|
||||
KEY_SRESET = 1
|
||||
KEY_RESET = 1
|
||||
KEY_PRINT = 1
|
||||
KEY_LL = 1
|
||||
KEY_A1 = 1
|
||||
KEY_A3 = 1
|
||||
KEY_B2 = 1
|
||||
KEY_C1 = 1
|
||||
KEY_C3 = 1
|
||||
KEY_BTAB = 1
|
||||
KEY_BEG = 1
|
||||
KEY_CANCEL = 1
|
||||
KEY_CLOSE = 1
|
||||
KEY_COMMAND = 1
|
||||
KEY_COPY = 1
|
||||
KEY_CREATE = 1
|
||||
KEY_END = 1
|
||||
KEY_EXIT = 1
|
||||
KEY_FIND = 1
|
||||
KEY_HELP = 1
|
||||
KEY_MARK = 1
|
||||
KEY_MESSAGE = 1
|
||||
KEY_MOVE = 1
|
||||
KEY_NEXT = 1
|
||||
KEY_OPEN = 1
|
||||
KEY_OPTIONS = 1
|
||||
KEY_PREVIOUS = 1
|
||||
KEY_REDO = 1
|
||||
KEY_REFERENCE = 1
|
||||
KEY_REFRESH = 1
|
||||
KEY_REPLACE = 1
|
||||
KEY_RESTART = 1
|
||||
KEY_RESUME = 1
|
||||
KEY_SAVE = 1
|
||||
KEY_SBEG = 1
|
||||
KEY_SCANCEL = 1
|
||||
KEY_SCOMMAND = 1
|
||||
KEY_SCOPY = 1
|
||||
KEY_SCREATE = 1
|
||||
KEY_SDC = 1
|
||||
KEY_SDL = 1
|
||||
KEY_SELECT = 1
|
||||
KEY_SEND = 1
|
||||
KEY_SEOL = 1
|
||||
KEY_SEXIT = 1
|
||||
KEY_SFIND = 1
|
||||
KEY_SHELP = 1
|
||||
KEY_SHOME = 1
|
||||
KEY_SIC = 1
|
||||
KEY_SLEFT = 1
|
||||
KEY_SMESSAGE = 1
|
||||
KEY_SMOVE = 1
|
||||
KEY_SNEXT = 1
|
||||
KEY_SOPTIONS = 1
|
||||
KEY_SPREVIOUS = 1
|
||||
KEY_SPRINT = 1
|
||||
KEY_SREDO = 1
|
||||
KEY_SREPLACE = 1
|
||||
KEY_SRIGHT = 1
|
||||
KEY_SRSUME = 1
|
||||
KEY_SSAVE = 1
|
||||
KEY_SSUSPEND = 1
|
||||
KEY_SUNDO = 1
|
||||
KEY_SUSPEND = 1
|
||||
KEY_UNDO = 1
|
||||
KEY_MOUSE = 1
|
||||
KEY_RESIZE = 1
|
||||
KEY_MAX = 1
|
||||
ACS_BBSS = 1
|
||||
ACS_BLOCK = 1
|
||||
ACS_BOARD = 1
|
||||
ACS_BSBS = 1
|
||||
ACS_BSSB = 1
|
||||
ACS_BSSS = 1
|
||||
ACS_BTEE = 1
|
||||
ACS_BULLET = 1
|
||||
ACS_CKBOARD = 1
|
||||
ACS_DARROW = 1
|
||||
ACS_DEGREE = 1
|
||||
ACS_DIAMOND = 1
|
||||
ACS_GEQUAL = 1
|
||||
ACS_HLINE = 1
|
||||
ACS_LANTERN = 1
|
||||
ACS_LARROW = 1
|
||||
ACS_LEQUAL = 1
|
||||
ACS_LLCORNER = 1
|
||||
ACS_LRCORNER = 1
|
||||
ACS_LTEE = 1
|
||||
ACS_NEQUAL = 1
|
||||
ACS_PI = 1
|
||||
ACS_PLMINUS = 1
|
||||
ACS_PLUS = 1
|
||||
ACS_RARROW = 1
|
||||
ACS_RTEE = 1
|
||||
ACS_S1 = 1
|
||||
ACS_S3 = 1
|
||||
ACS_S7 = 1
|
||||
ACS_S9 = 1
|
||||
ACS_SBBS = 1
|
||||
ACS_SBSB = 1
|
||||
ACS_SBSS = 1
|
||||
ACS_SSBB = 1
|
||||
ACS_SSBS = 1
|
||||
ACS_SSSB = 1
|
||||
ACS_SSSS = 1
|
||||
ACS_STERLING = 1
|
||||
ACS_TTEE = 1
|
||||
ACS_UARROW = 1
|
||||
ACS_ULCORNER = 1
|
||||
ACS_URCORNER = 1
|
||||
ACS_VLINE = 1
|
||||
COLOR_BLACK = 1
|
||||
COLOR_BLUE = 1
|
||||
COLOR_CYAN = 1
|
||||
COLOR_GREEN = 1
|
||||
COLOR_MAGENTA = 1
|
||||
COLOR_RED = 1
|
||||
COLOR_WHITE = 1
|
||||
COLOR_YELLOW = 1
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)
|
||||
@@ -0,0 +1,50 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
"""
|
||||
Astroid hook for the dataclasses library
|
||||
"""
|
||||
|
||||
import astroid
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
DATACLASSES_DECORATORS = frozenset(("dataclasses.dataclass", "dataclass"))
|
||||
|
||||
|
||||
def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS):
|
||||
"""Return True if a decorated node has a `dataclass` decorator applied."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator_attribute in node.decorators.nodes:
|
||||
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
|
||||
decorator_attribute = decorator_attribute.func
|
||||
if decorator_attribute.as_string() in decorator_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def dataclass_transform(node):
|
||||
"""Rewrite a dataclass to be easily understood by pylint"""
|
||||
|
||||
for assign_node in node.body:
|
||||
if not isinstance(assign_node, (astroid.AnnAssign, astroid.Assign)):
|
||||
continue
|
||||
|
||||
targets = (
|
||||
assign_node.targets
|
||||
if hasattr(assign_node, "targets")
|
||||
else [assign_node.target]
|
||||
)
|
||||
for target in targets:
|
||||
rhs_node = astroid.Unknown(
|
||||
lineno=assign_node.lineno,
|
||||
col_offset=assign_node.col_offset,
|
||||
parent=assign_node,
|
||||
)
|
||||
node.instance_attrs[target.name] = [rhs_node]
|
||||
node.locals[target.name] = [rhs_node]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, dataclass_transform, is_decorated_with_dataclass
|
||||
)
|
||||
@@ -0,0 +1,28 @@
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 raylu <lurayl@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for dateutil"""
|
||||
|
||||
import textwrap
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def dateutil_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import datetime
|
||||
def parse(timestr, parserinfo=None, **kwargs):
|
||||
return datetime.datetime()
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)
|
||||
@@ -0,0 +1,51 @@
|
||||
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import collections
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
|
||||
cls = node.__class__
|
||||
other_fields = node._other_fields
|
||||
_astroid_fields = node._astroid_fields
|
||||
init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
|
||||
postinit_params = {param: getattr(node, param) for param in _astroid_fields}
|
||||
if other_fields:
|
||||
init_params.update({param: getattr(node, param) for param in other_fields})
|
||||
new_node = cls(**init_params)
|
||||
if hasattr(node, "postinit") and _astroid_fields:
|
||||
for param, child in postinit_params.items():
|
||||
if child and not isinstance(child, collections.Sequence):
|
||||
cloned_child = _clone_node_with_lineno(
|
||||
node=child, lineno=new_node.lineno, parent=new_node
|
||||
)
|
||||
postinit_params[param] = cloned_child
|
||||
new_node.postinit(**postinit_params)
|
||||
return new_node
|
||||
|
||||
|
||||
def _transform_formatted_value(node):
|
||||
if node.value and node.value.lineno == 1:
|
||||
if node.lineno != node.value.lineno:
|
||||
new_node = astroid.FormattedValue(
|
||||
lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
|
||||
)
|
||||
new_value = _clone_node_with_lineno(
|
||||
node=node.value, lineno=node.lineno, parent=new_node
|
||||
)
|
||||
new_node.postinit(value=new_value, format_spec=node.format_spec)
|
||||
return new_node
|
||||
|
||||
|
||||
if sys.version_info[:2] >= (3, 6):
|
||||
# TODO: this fix tries to *patch* http://bugs.python.org/issue29051
|
||||
# The problem is that FormattedValue.value, which is a Name node,
|
||||
# has wrong line numbers, usually 1. This creates problems for pylint,
|
||||
# which expects correct line numbers for things such as message control.
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.FormattedValue, _transform_formatted_value
|
||||
)
|
||||
@@ -0,0 +1,159 @@
|
||||
# Copyright (c) 2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
|
||||
"""Astroid hooks for understanding functools library module."""
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
|
||||
import astroid
|
||||
from astroid import arguments
|
||||
from astroid import BoundMethod
|
||||
from astroid import extract_node
|
||||
from astroid import helpers
|
||||
from astroid.interpreter import objectmodel
|
||||
from astroid import MANAGER
|
||||
from astroid import objects
|
||||
|
||||
|
||||
LRU_CACHE = "functools.lru_cache"
|
||||
|
||||
|
||||
class LruWrappedModel(objectmodel.FunctionModel):
|
||||
"""Special attribute model for functions decorated with functools.lru_cache.
|
||||
|
||||
The said decorators patches at decoration time some functions onto
|
||||
the decorated function.
|
||||
"""
|
||||
|
||||
@property
|
||||
def attr___wrapped__(self):
|
||||
return self._instance
|
||||
|
||||
@property
|
||||
def attr_cache_info(self):
|
||||
cache_info = extract_node(
|
||||
"""
|
||||
from functools import _CacheInfo
|
||||
_CacheInfo(0, 0, 0, 0)
|
||||
"""
|
||||
)
|
||||
|
||||
class CacheInfoBoundMethod(BoundMethod):
|
||||
def infer_call_result(self, caller, context=None):
|
||||
yield helpers.safe_infer(cache_info)
|
||||
|
||||
return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
|
||||
|
||||
@property
|
||||
def attr_cache_clear(self):
|
||||
node = extract_node("""def cache_clear(self): pass""")
|
||||
return BoundMethod(proxy=node, bound=self._instance.parent.scope())
|
||||
|
||||
|
||||
def _transform_lru_cache(node, context=None):
|
||||
# TODO: this is not ideal, since the node should be immutable,
|
||||
# but due to https://github.com/PyCQA/astroid/issues/354,
|
||||
# there's not much we can do now.
|
||||
# Replacing the node would work partially, because,
|
||||
# in pylint, the old node would still be available, leading
|
||||
# to spurious false positives.
|
||||
node.special_attributes = LruWrappedModel()(node)
|
||||
return
|
||||
|
||||
|
||||
def _functools_partial_inference(node, context=None):
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
number_of_positional = len(call.positional_arguments)
|
||||
if number_of_positional < 1:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"functools.partial takes at least one argument"
|
||||
)
|
||||
if number_of_positional == 1 and not call.keyword_arguments:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"functools.partial needs at least to have some filled arguments"
|
||||
)
|
||||
|
||||
partial_function = call.positional_arguments[0]
|
||||
try:
|
||||
inferred_wrapped_function = next(partial_function.infer(context=context))
|
||||
except astroid.InferenceError as exc:
|
||||
raise astroid.UseInferenceDefault from exc
|
||||
if inferred_wrapped_function is astroid.Uninferable:
|
||||
raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
|
||||
if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
|
||||
raise astroid.UseInferenceDefault("The wrapped function is not a function")
|
||||
|
||||
# Determine if the passed keywords into the callsite are supported
|
||||
# by the wrapped function.
|
||||
function_parameters = chain(
|
||||
inferred_wrapped_function.args.args or (),
|
||||
inferred_wrapped_function.args.posonlyargs or (),
|
||||
inferred_wrapped_function.args.kwonlyargs or (),
|
||||
)
|
||||
parameter_names = set(
|
||||
param.name
|
||||
for param in function_parameters
|
||||
if isinstance(param, astroid.AssignName)
|
||||
)
|
||||
if set(call.keyword_arguments) - parameter_names:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"wrapped function received unknown parameters"
|
||||
)
|
||||
|
||||
partial_function = objects.PartialFunction(
|
||||
call,
|
||||
name=inferred_wrapped_function.name,
|
||||
doc=inferred_wrapped_function.doc,
|
||||
lineno=inferred_wrapped_function.lineno,
|
||||
col_offset=inferred_wrapped_function.col_offset,
|
||||
parent=inferred_wrapped_function.parent,
|
||||
)
|
||||
partial_function.postinit(
|
||||
args=inferred_wrapped_function.args,
|
||||
body=inferred_wrapped_function.body,
|
||||
decorators=inferred_wrapped_function.decorators,
|
||||
returns=inferred_wrapped_function.returns,
|
||||
type_comment_returns=inferred_wrapped_function.type_comment_returns,
|
||||
type_comment_args=inferred_wrapped_function.type_comment_args,
|
||||
)
|
||||
return iter((partial_function,))
|
||||
|
||||
|
||||
def _looks_like_lru_cache(node):
|
||||
"""Check if the given function node is decorated with lru_cache."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, astroid.Call):
|
||||
continue
|
||||
if _looks_like_functools_member(decorator, "lru_cache"):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _looks_like_functools_member(node, member):
|
||||
"""Check if the given Call node is a functools.partial call"""
|
||||
if isinstance(node.func, astroid.Name):
|
||||
return node.func.name == member
|
||||
elif isinstance(node.func, astroid.Attribute):
|
||||
return (
|
||||
node.func.attrname == member
|
||||
and isinstance(node.func.expr, astroid.Name)
|
||||
and node.func.expr.name == "functools"
|
||||
)
|
||||
|
||||
|
||||
_looks_like_partial = partial(_looks_like_functools_member, member="partial")
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
|
||||
)
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.Call,
|
||||
astroid.inference_tip(_functools_partial_inference),
|
||||
_looks_like_partial,
|
||||
)
|
||||
253
venv/lib/python3.8/site-packages/astroid/brain/brain_gi.py
Normal file
253
venv/lib/python3.8/site-packages/astroid/brain/brain_gi.py
Normal file
@@ -0,0 +1,253 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Cole Robinson <crobinso@redhat.com>
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Giuseppe Scrivano <gscrivan@redhat.com>
|
||||
# Copyright (c) 2018 Christoph Reiter <reiter.christoph@gmail.com>
|
||||
# Copyright (c) 2019 Philipp Hörist <philipp@hoerist.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from astroid import MANAGER, AstroidBuildingError, nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r"^[A-Za-z_]\w*$"
|
||||
|
||||
_special_methods = frozenset(
|
||||
{
|
||||
"__lt__",
|
||||
"__le__",
|
||||
"__eq__",
|
||||
"__ne__",
|
||||
"__ge__",
|
||||
"__gt__",
|
||||
"__iter__",
|
||||
"__getitem__",
|
||||
"__setitem__",
|
||||
"__delitem__",
|
||||
"__len__",
|
||||
"__bool__",
|
||||
"__nonzero__",
|
||||
"__next__",
|
||||
"__str__",
|
||||
"__len__",
|
||||
"__contains__",
|
||||
"__enter__",
|
||||
"__exit__",
|
||||
"__repr__",
|
||||
"__getattr__",
|
||||
"__setattr__",
|
||||
"__delattr__",
|
||||
"__del__",
|
||||
"__hash__",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__") and name not in _special_methods:
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
|
||||
functions[name] = obj
|
||||
elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
|
||||
methods[name] = obj
|
||||
elif (
|
||||
str(obj).startswith("<flags")
|
||||
or str(obj).startswith("<enum ")
|
||||
or str(obj).startswith("<GType ")
|
||||
or inspect.isdatadescriptor(obj)
|
||||
):
|
||||
constants[name] = 0
|
||||
elif isinstance(obj, (int, str)):
|
||||
constants[name] = obj
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += "# %s constants\n\n" % parent.__name__
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if isinstance(val, str):
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += "%s = %s\n" % (name, strval)
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += "# %s functions\n\n" % parent.__name__
|
||||
for name in sorted(functions):
|
||||
ret += "def %s(*args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += "# %s methods\n\n" % parent.__name__
|
||||
for name in sorted(methods):
|
||||
ret += "def %s(self, *args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += "# %s classes\n\n" % parent.__name__
|
||||
for name, obj in sorted(classes.items()):
|
||||
base = "object"
|
||||
if issubclass(obj, Exception):
|
||||
base = "Exception"
|
||||
ret += "class %s(%s):\n" % (name, base)
|
||||
|
||||
classret = _gi_build_stub(obj)
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith("gi.repository."):
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so we treat these modules as optional.
|
||||
if modname == "gi.repository.GLib":
|
||||
optional_modnames.append("gi._glib")
|
||||
elif modname == "gi.repository.GObject":
|
||||
optional_modnames.append("gi._gobject")
|
||||
|
||||
try:
|
||||
modcode = ""
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
with warnings.catch_warnings():
|
||||
# Just inspecting the code can raise gi deprecation
|
||||
# warnings, so ignore them.
|
||||
try:
|
||||
from gi import PyGIDeprecationWarning, PyGIWarning
|
||||
|
||||
warnings.simplefilter("ignore", PyGIDeprecationWarning)
|
||||
warnings.simplefilter("ignore", PyGIWarning)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
return astng
|
||||
|
||||
|
||||
def _looks_like_require_version(node):
|
||||
# Return whether this looks like a call to gi.require_version(<name>, <version>)
|
||||
# Only accept function calls with two constant arguments
|
||||
if len(node.args) != 2:
|
||||
return False
|
||||
|
||||
if not all(isinstance(arg, nodes.Const) for arg in node.args):
|
||||
return False
|
||||
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
if func.attrname != "require_version":
|
||||
return False
|
||||
if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == "require_version"
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _register_require_version(node):
|
||||
# Load the gi.require_version locally
|
||||
try:
|
||||
import gi
|
||||
|
||||
gi.require_version(node.args[0].value, node.args[1].value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return node
|
||||
|
||||
|
||||
MANAGER.register_failed_import_hook(_import_gi_module)
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, _register_require_version, _looks_like_require_version
|
||||
)
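
# Illustrative sketch, not part of astroid: the failed-import hook registered
# above means that when a ``gi.repository`` submodule cannot be analysed from
# source, ``_import_gi_module`` supplies a module stub built by
# ``_gi_build_stub``. Assuming pygobject is installed, this can be exercised
# locally along these lines:
if __name__ == "__main__":
    from astroid import MANAGER as _demo_manager

    _glib_stub = _demo_manager.ast_from_module_name("gi.repository.GLib")
    # Expected: a Module node whose locals come from the generated stub source.
    print(sorted(_glib_stub.locals)[:10])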
|
||||
@@ -0,0 +1,69 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 David Poirier <david-poirier-csn@users.noreply.github.com>
|
||||
# Copyright (c) 2018 wgehalo <wgehalo@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
import astroid
|
||||
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
|
||||
|
||||
def _hashlib_transform():
|
||||
signature = "value=''"
|
||||
template = """
|
||||
class %(name)s(object):
|
||||
def __init__(self, %(signature)s): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
"""
|
||||
algorithms_with_signature = dict.fromkeys(
|
||||
["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
|
||||
)
|
||||
if PY36:
|
||||
blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
new_algorithms = dict.fromkeys(
|
||||
["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
|
||||
signature,
|
||||
)
|
||||
algorithms_with_signature.update(new_algorithms)
|
||||
algorithms_with_signature.update(
|
||||
{"blake2b": blake2b_signature, "blake2s": blake2s_signature}
|
||||
)
|
||||
classes = "".join(
|
||||
template
|
||||
% {
|
||||
"name": hashfunc,
|
||||
"digest": 'b""' if six.PY3 else '""',
|
||||
"signature": signature,
|
||||
}
|
||||
for hashfunc, signature in algorithms_with_signature.items()
|
||||
)
|
||||
return astroid.parse(classes)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
|
||||
venv/lib/python3.8/site-packages/astroid/brain/brain_http.py
@@ -0,0 +1,211 @@
|
||||
# Copyright (c) 2018-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid brain hints for some of the `http` module."""
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def _http_transform():
|
||||
code = textwrap.dedent(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
_HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description')
|
||||
|
||||
class HTTPStatus:
|
||||
|
||||
@property
|
||||
def phrase(self):
|
||||
return ""
|
||||
@property
|
||||
def value(self):
|
||||
return 0
|
||||
@property
|
||||
def description(self):
|
||||
return ""
|
||||
|
||||
# informational
|
||||
CONTINUE = _HTTPStatus(100, 'Continue', 'Request received, please continue')
|
||||
SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols',
|
||||
'Switching to new protocol; obey Upgrade header')
|
||||
PROCESSING = _HTTPStatus(102, 'Processing', '')
|
||||
OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows')
|
||||
CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows')
|
||||
ACCEPTED = _HTTPStatus(202, 'Accepted',
|
||||
'Request accepted, processing continues off-line')
|
||||
NON_AUTHORITATIVE_INFORMATION = _HTTPStatus(203,
|
||||
'Non-Authoritative Information', 'Request fulfilled from cache')
|
||||
NO_CONTENT = _HTTPStatus(204, 'No Content', 'Request fulfilled, nothing follows')
|
||||
RESET_CONTENT = _HTTPStatus(205, 'Reset Content', 'Clear input form for further input')
|
||||
PARTIAL_CONTENT = _HTTPStatus(206, 'Partial Content', 'Partial content follows')
|
||||
MULTI_STATUS = _HTTPStatus(207, 'Multi-Status', '')
|
||||
ALREADY_REPORTED = _HTTPStatus(208, 'Already Reported', '')
|
||||
IM_USED = _HTTPStatus(226, 'IM Used', '')
|
||||
MULTIPLE_CHOICES = _HTTPStatus(300, 'Multiple Choices',
|
||||
'Object has several resources -- see URI list')
|
||||
MOVED_PERMANENTLY = _HTTPStatus(301, 'Moved Permanently',
|
||||
'Object moved permanently -- see URI list')
|
||||
FOUND = _HTTPStatus(302, 'Found', 'Object moved temporarily -- see URI list')
|
||||
SEE_OTHER = _HTTPStatus(303, 'See Other', 'Object moved -- see Method and URL list')
|
||||
NOT_MODIFIED = _HTTPStatus(304, 'Not Modified',
|
||||
'Document has not changed since given time')
|
||||
USE_PROXY = _HTTPStatus(305, 'Use Proxy',
|
||||
'You must use proxy specified in Location to access this resource')
|
||||
TEMPORARY_REDIRECT = _HTTPStatus(307, 'Temporary Redirect',
|
||||
'Object moved temporarily -- see URI list')
|
||||
PERMANENT_REDIRECT = _HTTPStatus(308, 'Permanent Redirect',
|
||||
'Object moved permanently -- see URI list')
|
||||
BAD_REQUEST = _HTTPStatus(400, 'Bad Request',
|
||||
'Bad request syntax or unsupported method')
|
||||
UNAUTHORIZED = _HTTPStatus(401, 'Unauthorized',
|
||||
'No permission -- see authorization schemes')
|
||||
PAYMENT_REQUIRED = _HTTPStatus(402, 'Payment Required',
|
||||
'No payment -- see charging schemes')
|
||||
FORBIDDEN = _HTTPStatus(403, 'Forbidden',
|
||||
'Request forbidden -- authorization will not help')
|
||||
NOT_FOUND = _HTTPStatus(404, 'Not Found',
|
||||
'Nothing matches the given URI')
|
||||
METHOD_NOT_ALLOWED = _HTTPStatus(405, 'Method Not Allowed',
|
||||
'Specified method is invalid for this resource')
|
||||
NOT_ACCEPTABLE = _HTTPStatus(406, 'Not Acceptable',
|
||||
'URI not available in preferred format')
|
||||
PROXY_AUTHENTICATION_REQUIRED = _HTTPStatus(407,
|
||||
'Proxy Authentication Required',
|
||||
'You must authenticate with this proxy before proceeding')
|
||||
REQUEST_TIMEOUT = _HTTPStatus(408, 'Request Timeout',
|
||||
'Request timed out; try again later')
|
||||
CONFLICT = _HTTPStatus(409, 'Conflict', 'Request conflict')
|
||||
GONE = _HTTPStatus(410, 'Gone',
|
||||
'URI no longer exists and has been permanently removed')
|
||||
LENGTH_REQUIRED = _HTTPStatus(411, 'Length Required',
|
||||
'Client must specify Content-Length')
|
||||
PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed',
|
||||
'Precondition in headers is false')
|
||||
REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large',
|
||||
'Entity is too large')
|
||||
REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long',
|
||||
'URI is too long')
|
||||
UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type',
|
||||
'Entity body in unsupported format')
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416,
|
||||
'Requested Range Not Satisfiable',
|
||||
'Cannot satisfy request range')
|
||||
EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed',
|
||||
'Expect condition could not be satisfied')
|
||||
MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request',
|
||||
'Server is not able to produce a response')
|
||||
UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity')
|
||||
LOCKED = _HTTPStatus(423, 'Locked')
|
||||
FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency')
|
||||
UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required')
|
||||
PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required',
|
||||
'The origin server requires the request to be conditional')
|
||||
TOO_MANY_REQUESTS = _HTTPStatus(429, 'Too Many Requests',
|
||||
'The user has sent too many requests in '
|
||||
'a given amount of time ("rate limiting")')
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE = _HTTPStatus(431,
|
||||
'Request Header Fields Too Large',
|
||||
'The server is unwilling to process the request because its header '
|
||||
'fields are too large')
|
||||
UNAVAILABLE_FOR_LEGAL_REASONS = _HTTPStatus(451,
|
||||
'Unavailable For Legal Reasons',
|
||||
'The server is denying access to the '
|
||||
'resource as a consequence of a legal demand')
|
||||
INTERNAL_SERVER_ERROR = _HTTPStatus(500, 'Internal Server Error',
|
||||
'Server got itself in trouble')
|
||||
NOT_IMPLEMENTED = _HTTPStatus(501, 'Not Implemented',
|
||||
'Server does not support this operation')
|
||||
BAD_GATEWAY = _HTTPStatus(502, 'Bad Gateway',
|
||||
'Invalid responses from another server/proxy')
|
||||
SERVICE_UNAVAILABLE = _HTTPStatus(503, 'Service Unavailable',
|
||||
'The server cannot process the request due to a high load')
|
||||
GATEWAY_TIMEOUT = _HTTPStatus(504, 'Gateway Timeout',
|
||||
'The gateway server did not receive a timely response')
|
||||
HTTP_VERSION_NOT_SUPPORTED = _HTTPStatus(505, 'HTTP Version Not Supported',
|
||||
'Cannot fulfill request')
|
||||
VARIANT_ALSO_NEGOTIATES = _HTTPStatus(506, 'Variant Also Negotiates')
|
||||
INSUFFICIENT_STORAGE = _HTTPStatus(507, 'Insufficient Storage')
|
||||
LOOP_DETECTED = _HTTPStatus(508, 'Loop Detected')
|
||||
NOT_EXTENDED = _HTTPStatus(510, 'Not Extended')
|
||||
NETWORK_AUTHENTICATION_REQUIRED = _HTTPStatus(511,
|
||||
'Network Authentication Required',
|
||||
'The client needs to authenticate to gain network access')
|
||||
"""
|
||||
)
|
||||
return AstroidBuilder(astroid.MANAGER).string_build(code)
|
||||
|
||||
|
||||
def _http_client_transform():
|
||||
return AstroidBuilder(astroid.MANAGER).string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
from http import HTTPStatus
|
||||
|
||||
CONTINUE = HTTPStatus.CONTINUE
|
||||
SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
|
||||
PROCESSING = HTTPStatus.PROCESSING
|
||||
OK = HTTPStatus.OK
|
||||
CREATED = HTTPStatus.CREATED
|
||||
ACCEPTED = HTTPStatus.ACCEPTED
|
||||
NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
|
||||
NO_CONTENT = HTTPStatus.NO_CONTENT
|
||||
RESET_CONTENT = HTTPStatus.RESET_CONTENT
|
||||
PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
|
||||
MULTI_STATUS = HTTPStatus.MULTI_STATUS
|
||||
ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
|
||||
IM_USED = HTTPStatus.IM_USED
|
||||
MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
|
||||
MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
|
||||
FOUND = HTTPStatus.FOUND
|
||||
SEE_OTHER = HTTPStatus.SEE_OTHER
|
||||
NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
|
||||
USE_PROXY = HTTPStatus.USE_PROXY
|
||||
TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
|
||||
PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
|
||||
BAD_REQUEST = HTTPStatus.BAD_REQUEST
|
||||
UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
|
||||
PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
|
||||
FORBIDDEN = HTTPStatus.FORBIDDEN
|
||||
NOT_FOUND = HTTPStatus.NOT_FOUND
|
||||
METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
|
||||
NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
|
||||
PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
|
||||
REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
|
||||
CONFLICT = HTTPStatus.CONFLICT
|
||||
GONE = HTTPStatus.GONE
|
||||
LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
|
||||
PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
|
||||
REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
|
||||
REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
|
||||
UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
|
||||
EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
|
||||
UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
|
||||
LOCKED = HTTPStatus.LOCKED
|
||||
FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
|
||||
UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
|
||||
PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
|
||||
TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
|
||||
INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
|
||||
NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
|
||||
BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
|
||||
SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
|
||||
GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
|
||||
HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
|
||||
VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
|
||||
INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
|
||||
LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
|
||||
NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
|
||||
NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "http", _http_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, "http.client", _http_client_transform)
|
||||
venv/lib/python3.8/site-packages/astroid/brain/brain_io.py
@@ -0,0 +1,45 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid brain hints for some of the _io C objects."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
BUFFERED = {"BufferedWriter", "BufferedReader"}
|
||||
TextIOWrapper = "TextIOWrapper"
|
||||
FileIO = "FileIO"
|
||||
BufferedWriter = "BufferedWriter"
|
||||
|
||||
|
||||
def _generic_io_transform(node, name, cls):
|
||||
"""Transform the given name, by adding the given *class* as a member of the node."""
|
||||
|
||||
io_module = astroid.MANAGER.ast_from_module_name("_io")
|
||||
attribute_object = io_module[cls]
|
||||
instance = attribute_object.instantiate_class()
|
||||
node.locals[name] = [instance]
|
||||
|
||||
|
||||
def _transform_text_io_wrapper(node):
|
||||
# This is not always correct, since it can vary with the type of the descriptor,
|
||||
# being stdout, stderr or stdin. But we cannot get access to the name of the
|
||||
# stream, which is why we are using the BufferedWriter class as a default
|
||||
# value
|
||||
return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
|
||||
|
||||
|
||||
def _transform_buffered(node):
|
||||
return _generic_io_transform(node, name="raw", cls=FileIO)
|
||||
|
||||
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
|
||||
)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.ClassDef,
|
||||
_transform_text_io_wrapper,
|
||||
lambda node: node.name == TextIOWrapper,
|
||||
)
|
||||
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def mechanize_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(
|
||||
"""
|
||||
|
||||
class Browser(object):
|
||||
def open(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_novisit(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_local_file(self, filename):
|
||||
return None
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "mechanize", mechanize_transform)
|
||||
@@ -0,0 +1,107 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import exceptions
|
||||
|
||||
|
||||
def _multiprocessing_transform():
|
||||
module = astroid.parse(
|
||||
"""
|
||||
from multiprocessing.managers import SyncManager
|
||||
def Manager():
|
||||
return SyncManager()
|
||||
"""
|
||||
)
|
||||
# Multiprocessing uses a getattr lookup inside contexts,
|
||||
# in order to get the attributes they need. Since it's extremely
|
||||
# dynamic, we use this approach to fake it.
|
||||
node = astroid.parse(
|
||||
"""
|
||||
from multiprocessing.context import DefaultContext, BaseContext
|
||||
default = DefaultContext()
|
||||
base = BaseContext()
|
||||
"""
|
||||
)
|
||||
try:
|
||||
context = next(node["default"].infer())
|
||||
base = next(node["base"].infer())
|
||||
except exceptions.InferenceError:
|
||||
return module
|
||||
|
||||
for node in (context, base):
|
||||
for key, value in node.locals.items():
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
|
||||
value = value[0]
|
||||
if isinstance(value, astroid.FunctionDef):
|
||||
# We need to rebind this, since otherwise
|
||||
# it will have an extra argument (self).
|
||||
value = astroid.BoundMethod(value, node)
|
||||
module[key] = value
|
||||
return module
|
||||
|
||||
|
||||
def _multiprocessing_managers_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
import array
|
||||
import threading
|
||||
import multiprocessing.pool as pool
|
||||
|
||||
import six
|
||||
|
||||
class Namespace(object):
|
||||
pass
|
||||
|
||||
class Value(object):
|
||||
def __init__(self, typecode, value, lock=True):
|
||||
self._typecode = typecode
|
||||
self._value = value
|
||||
def get(self):
|
||||
return self._value
|
||||
def set(self, value):
|
||||
self._value = value
|
||||
def __repr__(self):
|
||||
return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
|
||||
value = property(get, set)
|
||||
|
||||
def Array(typecode, sequence, lock=True):
|
||||
return array.array(typecode, sequence)
|
||||
|
||||
class SyncManager(object):
|
||||
Queue = JoinableQueue = six.moves.queue.Queue
|
||||
Event = threading.Event
|
||||
RLock = threading.RLock
|
||||
BoundedSemaphore = threading.BoundedSemaphore
|
||||
Condition = threading.Condition
|
||||
Barrier = threading.Barrier
|
||||
Pool = pool.Pool
|
||||
list = list
|
||||
dict = dict
|
||||
Value = Value
|
||||
Array = Array
|
||||
Namespace = Namespace
|
||||
__enter__ = lambda self: self
|
||||
__exit__ = lambda *args: args
|
||||
|
||||
def start(self, initializer=None, initargs=None):
|
||||
pass
|
||||
def shutdown(self):
|
||||
pass
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
|
||||
)
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "multiprocessing", _multiprocessing_transform
|
||||
)
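
# Illustrative sketch, not part of astroid: the extenders above make
# ``multiprocessing.Manager()`` infer to a SyncManager-like instance, so
# attributes such as ``Queue`` or ``dict`` are visible to astroid.
if __name__ == "__main__":
    import astroid as _demo_astroid

    _manager_call = _demo_astroid.extract_node(
        """
        import multiprocessing
        multiprocessing.Manager()  #@
        """
    )
    print(next(_manager_call.infer()))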
|
||||
@@ -0,0 +1,453 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Mateusz Bysiek <mb@mbdev.pl>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python standard library."""
|
||||
|
||||
import functools
|
||||
import keyword
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
|
||||
from astroid import arguments
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder, extract_node
|
||||
from astroid import util
|
||||
|
||||
|
||||
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
|
||||
ENUM_BASE_NAMES = {
|
||||
"Enum",
|
||||
"IntEnum",
|
||||
"enum.Enum",
|
||||
"enum.IntEnum",
|
||||
"IntFlag",
|
||||
"enum.IntFlag",
|
||||
}
|
||||
|
||||
|
||||
def _infer_first(node, context):
|
||||
if node is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
value = next(node.infer(context=context))
|
||||
if value is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
else:
|
||||
return value
|
||||
except StopIteration:
|
||||
raise InferenceError()
|
||||
|
||||
|
||||
def _find_func_form_arguments(node, context):
|
||||
def _extract_namedtuple_arg_or_keyword(position, key_name=None):
|
||||
|
||||
if len(args) > position:
|
||||
return _infer_first(args[position], context)
|
||||
if key_name and key_name in found_keywords:
|
||||
return _infer_first(found_keywords[key_name], context)
|
||||
|
||||
args = node.args
|
||||
keywords = node.keywords
|
||||
found_keywords = (
|
||||
{keyword.arg: keyword.value for keyword in keywords} if keywords else {}
|
||||
)
|
||||
|
||||
name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
|
||||
names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
|
||||
if name and names:
|
||||
return name.value, names
|
||||
|
||||
raise UseInferenceDefault()
|
||||
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
|
||||
"""Specific inference function for namedtuple or Python 3 enum. """
|
||||
# node is a Call node, class name as first argument and generated class
|
||||
# attributes as second argument
|
||||
|
||||
# namedtuple or enums list of attributes can be a list of strings or a
|
||||
# whitespace-separated string
|
||||
try:
|
||||
name, names = _find_func_form_arguments(node, context)
|
||||
try:
|
||||
attributes = names.value.replace(",", " ").split()
|
||||
except AttributeError:
|
||||
if not enum:
|
||||
attributes = [
|
||||
_infer_first(const, context).value for const in names.elts
|
||||
]
|
||||
else:
|
||||
# Enums support either an iterator of (name, value) pairs
|
||||
# or mappings.
|
||||
if hasattr(names, "items") and isinstance(names.items, list):
|
||||
attributes = [
|
||||
_infer_first(const[0], context).value
|
||||
for const in names.items
|
||||
if isinstance(const[0], nodes.Const)
|
||||
]
|
||||
elif hasattr(names, "elts"):
|
||||
# Enums can support either ["a", "b", "c"]
|
||||
# or [("a", 1), ("b", 2), ...], but they can't
|
||||
# be mixed.
|
||||
if all(isinstance(const, nodes.Tuple) for const in names.elts):
|
||||
attributes = [
|
||||
_infer_first(const.elts[0], context).value
|
||||
for const in names.elts
|
||||
if isinstance(const, nodes.Tuple)
|
||||
]
|
||||
else:
|
||||
attributes = [
|
||||
_infer_first(const, context).value for const in names.elts
|
||||
]
|
||||
else:
|
||||
raise AttributeError
|
||||
if not attributes:
|
||||
raise AttributeError
|
||||
except (AttributeError, exceptions.InferenceError):
|
||||
raise UseInferenceDefault()
|
||||
|
||||
# If we can't infer the name of the class, don't crash, up to this point
|
||||
# we know it is a namedtuple anyway.
|
||||
name = name or "Uninferable"
|
||||
# we want to return a Class node instance with proper attributes set
|
||||
class_node = nodes.ClassDef(name, "docstring")
|
||||
class_node.parent = node.parent
|
||||
# set base class=tuple
|
||||
class_node.bases.append(base_type)
|
||||
# XXX add __init__(*attributes) method
|
||||
for attr in attributes:
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
fake_node.attrname = attr
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return class_node, name, attributes
|
||||
|
||||
|
||||
def _has_namedtuple_base(node):
|
||||
"""Predicate for class inference tip
|
||||
|
||||
:type node: ClassDef
|
||||
:rtype: bool
|
||||
"""
|
||||
return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
|
||||
|
||||
|
||||
def _looks_like(node, name):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return func.attrname == name
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == name
|
||||
return False
|
||||
|
||||
|
||||
_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
|
||||
_looks_like_enum = functools.partial(_looks_like, name="Enum")
|
||||
_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
|
||||
|
||||
|
||||
def infer_named_tuple(node, context=None):
|
||||
"""Specific inference function for namedtuple Call node"""
|
||||
tuple_base_name = nodes.Name(name="tuple", parent=node.root())
|
||||
class_node, name, attributes = infer_func_form(
|
||||
node, tuple_base_name, context=context
|
||||
)
|
||||
call_site = arguments.CallSite.from_call(node, context=context)
|
||||
func = next(extract_node("import collections; collections.namedtuple").infer())
|
||||
try:
|
||||
rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
|
||||
except InferenceError:
|
||||
rename = False
|
||||
|
||||
if rename:
|
||||
attributes = _get_renamed_namedtuple_attributes(attributes)
|
||||
|
||||
replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
|
||||
field_def = (
|
||||
" {name} = property(lambda self: self[{index:d}], "
|
||||
"doc='Alias for field number {index:d}')"
|
||||
)
|
||||
field_defs = "\n".join(
|
||||
field_def.format(name=name, index=index)
|
||||
for index, name in enumerate(attributes)
|
||||
)
|
||||
fake = AstroidBuilder(MANAGER).string_build(
|
||||
"""
|
||||
class %(name)s(tuple):
|
||||
__slots__ = ()
|
||||
_fields = %(fields)r
|
||||
def _asdict(self):
|
||||
return self.__dict__
|
||||
@classmethod
|
||||
def _make(cls, iterable, new=tuple.__new__, len=len):
|
||||
return new(cls, iterable)
|
||||
def _replace(self, %(replace_args)s):
|
||||
return self
|
||||
def __getnewargs__(self):
|
||||
return tuple(self)
|
||||
%(field_defs)s
|
||||
"""
|
||||
% {
|
||||
"name": name,
|
||||
"fields": attributes,
|
||||
"field_defs": field_defs,
|
||||
"replace_args": replace_args,
|
||||
}
|
||||
)
|
||||
class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
|
||||
class_node.locals["_make"] = fake.body[0].locals["_make"]
|
||||
class_node.locals["_replace"] = fake.body[0].locals["_replace"]
|
||||
class_node.locals["_fields"] = fake.body[0].locals["_fields"]
|
||||
for attr in attributes:
|
||||
class_node.locals[attr] = fake.body[0].locals[attr]
|
||||
# we use UseInferenceDefault, we can't be a generator so return an iterator
|
||||
return iter([class_node])
|
||||
|
||||
|
||||
def _get_renamed_namedtuple_attributes(field_names):
|
||||
names = list(field_names)
|
||||
seen = set()
|
||||
for i, name in enumerate(field_names):
|
||||
if (
|
||||
not all(c.isalnum() or c == "_" for c in name)
|
||||
or keyword.iskeyword(name)
|
||||
or not name
|
||||
or name[0].isdigit()
|
||||
or name.startswith("_")
|
||||
or name in seen
|
||||
):
|
||||
names[i] = "_%d" % i
|
||||
seen.add(name)
|
||||
return tuple(names)
|
||||
|
||||
|
||||
def infer_enum(node, context=None):
|
||||
""" Specific inference function for enum Call node. """
|
||||
enum_meta = extract_node(
|
||||
"""
|
||||
class EnumMeta(object):
|
||||
'docstring'
|
||||
def __call__(self, node):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return EnumAttribute()
|
||||
def __iter__(self):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return [EnumAttribute()]
|
||||
def __reversed__(self):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return (EnumAttribute, )
|
||||
def __next__(self):
|
||||
return next(iter(self))
|
||||
def __getitem__(self, attr):
|
||||
class Value(object):
|
||||
@property
|
||||
def name(self):
|
||||
return ''
|
||||
@property
|
||||
def value(self):
|
||||
return attr
|
||||
|
||||
return Value()
|
||||
__members__ = ['']
|
||||
"""
|
||||
)
|
||||
class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
|
||||
return iter([class_node.instantiate_class()])
|
||||
|
||||
|
||||
INT_FLAG_ADDITION_METHODS = """
|
||||
def __or__(self, other):
|
||||
return {name}(self.value | other.value)
|
||||
def __and__(self, other):
|
||||
return {name}(self.value & other.value)
|
||||
def __xor__(self, other):
|
||||
return {name}(self.value ^ other.value)
|
||||
def __add__(self, other):
|
||||
return {name}(self.value + other.value)
|
||||
def __div__(self, other):
|
||||
return {name}(self.value / other.value)
|
||||
def __invert__(self):
|
||||
return {name}(~self.value)
|
||||
def __mul__(self, other):
|
||||
return {name}(self.value * other.value)
|
||||
"""
|
||||
|
||||
|
||||
def infer_enum_class(node):
|
||||
""" Specific inference for enums. """
|
||||
for basename in node.basenames:
|
||||
# TODO: doesn't handle subclasses yet. This implementation
|
||||
# is a hack to support enums.
|
||||
if basename not in ENUM_BASE_NAMES:
|
||||
continue
|
||||
if node.root().name == "enum":
|
||||
# Skip if the class is directly from enum module.
|
||||
break
|
||||
for local, values in node.locals.items():
|
||||
if any(not isinstance(value, nodes.AssignName) for value in values):
|
||||
continue
|
||||
|
||||
targets = []
|
||||
stmt = values[0].statement()
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
if isinstance(stmt.targets[0], nodes.Tuple):
|
||||
targets = stmt.targets[0].itered()
|
||||
else:
|
||||
targets = stmt.targets
|
||||
elif isinstance(stmt, nodes.AnnAssign):
|
||||
targets = [stmt.target]
|
||||
else:
|
||||
continue
|
||||
|
||||
inferred_return_value = None
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
if isinstance(stmt.value, nodes.Const):
|
||||
if isinstance(stmt.value.value, str):
|
||||
inferred_return_value = repr(stmt.value.value)
|
||||
else:
|
||||
inferred_return_value = stmt.value.value
|
||||
else:
|
||||
inferred_return_value = stmt.value.as_string()
|
||||
|
||||
new_targets = []
|
||||
for target in targets:
|
||||
# Replace all the assignments with our mocked class.
|
||||
classdef = dedent(
|
||||
"""
|
||||
class {name}({types}):
|
||||
@property
|
||||
def value(self):
|
||||
return {return_value}
|
||||
@property
|
||||
def name(self):
|
||||
return "{name}"
|
||||
""".format(
|
||||
name=target.name,
|
||||
types=", ".join(node.basenames),
|
||||
return_value=inferred_return_value,
|
||||
)
|
||||
)
|
||||
if "IntFlag" in basename:
|
||||
# Alright, we need to add some additional methods.
|
||||
# Unfortunately we still can't infer the resulting objects as
|
||||
# Enum members, but once we'll be able to do that, the following
|
||||
# should result in some nice symbolic execution
|
||||
classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)
|
||||
|
||||
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
|
||||
fake.parent = target.parent
|
||||
for method in node.mymethods():
|
||||
fake.locals[method.name] = [method]
|
||||
new_targets.append(fake.instantiate_class())
|
||||
node.locals[local] = new_targets
|
||||
break
|
||||
return node
|
||||
|
||||
|
||||
def infer_typing_namedtuple_class(class_node, context=None):
|
||||
"""Infer a subclass of typing.NamedTuple"""
|
||||
# Check if it has the corresponding bases
|
||||
annassigns_fields = [
|
||||
annassign.target.name
|
||||
for annassign in class_node.body
|
||||
if isinstance(annassign, nodes.AnnAssign)
|
||||
]
|
||||
code = dedent(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
namedtuple({typename!r}, {fields!r})
|
||||
"""
|
||||
).format(typename=class_node.name, fields=",".join(annassigns_fields))
|
||||
node = extract_node(code)
|
||||
generated_class_node = next(infer_named_tuple(node, context))
|
||||
for method in class_node.mymethods():
|
||||
generated_class_node.locals[method.name] = [method]
|
||||
|
||||
for assign in class_node.body:
|
||||
if not isinstance(assign, nodes.Assign):
|
||||
continue
|
||||
|
||||
for target in assign.targets:
|
||||
attr = target.name
|
||||
generated_class_node.locals[attr] = class_node.locals[attr]
|
||||
|
||||
return iter((generated_class_node,))
|
||||
|
||||
|
||||
def infer_typing_namedtuple(node, context=None):
|
||||
"""Infer a typing.NamedTuple(...) call."""
|
||||
# This is essentially a namedtuple with different arguments
|
||||
# so we extract the args and infer a named tuple.
|
||||
try:
|
||||
func = next(node.func.infer())
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if func.qname() != "typing.NamedTuple":
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(node.args) != 2:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
names = []
|
||||
for elt in node.args[1].elts:
|
||||
if not isinstance(elt, (nodes.List, nodes.Tuple)):
|
||||
raise UseInferenceDefault
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault
|
||||
names.append(elt.elts[0].as_string())
|
||||
|
||||
typename = node.args[0].as_string()
|
||||
if names:
|
||||
field_names = "({},)".format(",".join(names))
|
||||
else:
|
||||
field_names = "''"
|
||||
node = extract_node(
|
||||
"namedtuple({typename}, {fields})".format(typename=typename, fields=field_names)
|
||||
)
|
||||
return infer_named_tuple(node, context)
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
|
||||
)
|
||||
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
infer_enum_class,
|
||||
predicate=lambda cls: any(
|
||||
basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
|
||||
),
|
||||
)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
|
||||
)
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
|
||||
)
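
# Illustrative sketch, not part of astroid: the namedtuple/enum brains
# registered above let astroid infer a proper ClassDef for dynamic calls such
# as ``collections.namedtuple("Point", "x y")``. A quick local check, using
# the existing ``extract_node`` helper, might look like this:
if __name__ == "__main__":
    import astroid as _demo_astroid

    _nt_call = _demo_astroid.extract_node(
        """
        import collections
        collections.namedtuple("Point", "x y")  #@
        """
    )
    _nt_class = next(_nt_call.infer())
    # Expected: a ClassDef named "Point" with x and y among its instance attributes.
    print(_nt_class.name, sorted(_nt_class.instance_attrs))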
|
||||
venv/lib/python3.8/site-packages/astroid/brain/brain_nose.py
@@ -0,0 +1,77 @@
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
import astroid.builder
|
||||
|
||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
|
||||
|
||||
|
||||
def _pep8(name, caps=re.compile("([A-Z])")):
|
||||
return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
|
||||
"""Get an iterator of names and bound methods."""
|
||||
module = _BUILDER.string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import unittest
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
pass
|
||||
a = Test()
|
||||
"""
|
||||
)
|
||||
)
|
||||
try:
|
||||
case = next(module["a"].infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
for method in case.methods():
|
||||
if method.name.startswith("assert") and "_" not in method.name:
|
||||
pep8_name = _pep8(method.name)
|
||||
yield pep8_name, astroid.BoundMethod(method, case)
|
||||
if method.name == "assertEqual":
|
||||
# nose also exports assert_equals.
|
||||
yield "assert_equals", astroid.BoundMethod(method, case)
|
||||
|
||||
|
||||
def _nose_tools_transform(node):
|
||||
for method_name, method in _nose_tools_functions():
|
||||
node.locals[method_name] = [method]
|
||||
|
||||
|
||||
def _nose_tools_trivial_transform():
|
||||
"""Custom transform for the nose.tools module."""
|
||||
stub = _BUILDER.string_build("""__all__ = []""")
|
||||
all_entries = ["ok_", "eq_"]
|
||||
|
||||
for pep8_name, method in _nose_tools_functions():
|
||||
all_entries.append(pep8_name)
|
||||
stub[pep8_name] = method
|
||||
|
||||
# Update the __all__ variable, since nose.tools
|
||||
# does this manually with .append.
|
||||
all_assign = stub["__all__"].parent
|
||||
all_object = astroid.List(all_entries)
|
||||
all_object.parent = all_assign
|
||||
all_assign.value = all_object
|
||||
return stub
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
|
||||
)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.fromnumeric module."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def numpy_core_fromnumeric_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None):
|
||||
return numpy.ndarray([0, 0])
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.core.fromnumeric", numpy_core_fromnumeric_transform
|
||||
)
|
||||
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.function_base module."""
|
||||
|
||||
import functools
|
||||
import astroid
|
||||
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
|
||||
|
||||
|
||||
METHODS_TO_BE_INFERRED = {
|
||||
"linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"logspace": """def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"geomspace": """def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
}
|
||||
|
||||
for func_name, func_src in METHODS_TO_BE_INFERRED.items():
|
||||
inference_function = functools.partial(infer_numpy_member, func_src)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Attribute,
|
||||
astroid.inference_tip(inference_function),
|
||||
functools.partial(looks_like_numpy_member, func_name),
|
||||
)
|
||||
@@ -0,0 +1,92 @@
|
||||
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.multiarray module."""
|
||||
|
||||
import functools
|
||||
import astroid
|
||||
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
|
||||
|
||||
|
||||
def numpy_core_multiarray_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
# different functions defined in multiarray.py
|
||||
def inner(a, b):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
def vdot(a, b):
|
||||
return numpy.ndarray([0, 0])
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.core.multiarray", numpy_core_multiarray_transform
|
||||
)
|
||||
|
||||
|
||||
METHODS_TO_BE_INFERRED = {
|
||||
"array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"dot": """def dot(a, b, out=None):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"empty_like": """def empty_like(a, dtype=None, order='K', subok=True):
|
||||
return numpy.ndarray((0, 0))""",
|
||||
"concatenate": """def concatenate(arrays, axis=None, out=None):
|
||||
return numpy.ndarray((0, 0))""",
|
||||
"where": """def where(condition, x=None, y=None):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"empty": """def empty(shape, dtype=float, order='C'):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"bincount": """def bincount(x, weights=None, minlength=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"busday_count": """def busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"busday_offset": """def busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"can_cast": """def can_cast(from_, to, casting='safe'):
|
||||
return True""",
|
||||
"copyto": """def copyto(dst, src, casting='same_kind', where=True):
|
||||
return None""",
|
||||
"datetime_as_string": """def datetime_as_string(arr, unit=None, timezone='naive', casting='same_kind'):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"is_busday": """def is_busday(dates, weekmask='1111100', holidays=None, busdaycal=None, out=None):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"lexsort": """def lexsort(keys, axis=-1):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"may_share_memory": """def may_share_memory(a, b, max_work=None):
|
||||
return True""",
|
||||
# Not yet available because dtype is not yet present in those brains
|
||||
# "min_scalar_type": """def min_scalar_type(a):
|
||||
# return numpy.dtype('int16')""",
|
||||
"packbits": """def packbits(a, axis=None, bitorder='big'):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
# Not yet available because dtype is not yet present in those brains
|
||||
# "result_type": """def result_type(*arrays_and_dtypes):
|
||||
# return numpy.dtype('int16')""",
|
||||
"shares_memory": """def shares_memory(a, b, max_work=None):
|
||||
return True""",
|
||||
"unpackbits": """def unpackbits(a, axis=None, count=None, bitorder='big'):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"unravel_index": """def unravel_index(indices, shape, order='C'):
|
||||
return (numpy.ndarray([0, 0]),)""",
|
||||
"zeros": """def zeros(shape, dtype=float, order='C'):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
}
|
||||
|
||||
for method_name, function_src in METHODS_TO_BE_INFERRED.items():
|
||||
inference_function = functools.partial(infer_numpy_member, function_src)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Attribute,
|
||||
astroid.inference_tip(inference_function),
|
||||
functools.partial(looks_like_numpy_member, method_name),
|
||||
)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Name,
|
||||
astroid.inference_tip(inference_function),
|
||||
functools.partial(looks_like_numpy_member, method_name),
|
||||
)
|
||||
@@ -0,0 +1,43 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.numeric module."""
|
||||
|
||||
import functools
|
||||
import astroid
|
||||
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
|
||||
|
||||
|
||||
def numpy_core_numeric_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
# different functions defined in numeric.py
|
||||
import numpy
|
||||
def zeros_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
|
||||
def ones_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
|
||||
def full_like(a, fill_value, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.core.numeric", numpy_core_numeric_transform
|
||||
)
|
||||
|
||||
|
||||
METHODS_TO_BE_INFERRED = {
|
||||
"ones": """def ones(shape, dtype=None, order='C'):
|
||||
return numpy.ndarray([0, 0])"""
|
||||
}
|
||||
|
||||
|
||||
for method_name, function_src in METHODS_TO_BE_INFERRED.items():
|
||||
inference_function = functools.partial(infer_numpy_member, function_src)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Attribute,
|
||||
astroid.inference_tip(inference_function),
|
||||
functools.partial(looks_like_numpy_member, method_name),
|
||||
)
|
||||
@@ -0,0 +1,254 @@
|
||||
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
# TODO(hippo91) : correct the methods signature.
|
||||
|
||||
"""Astroid hooks for numpy.core.numerictypes module."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def numpy_core_numerictypes_transform():
|
||||
# TODO: Uniformize the generic API with the ndarray one.
|
||||
# According to numpy doc the generic object should expose
|
||||
# the same API than ndarray. This has been done here partially
|
||||
# through the astype method.
|
||||
return astroid.parse(
|
||||
"""
|
||||
# different types defined in numerictypes.py
|
||||
class generic(object):
|
||||
def __init__(self, value):
|
||||
self.T = None
|
||||
self.base = None
|
||||
self.data = None
|
||||
self.dtype = None
|
||||
self.flags = None
|
||||
self.flat = None
|
||||
self.imag = None
|
||||
self.itemsize = None
|
||||
self.nbytes = None
|
||||
self.ndim = None
|
||||
self.real = None
|
||||
self.size = None
|
||||
self.strides = None
|
||||
|
||||
def all(self): return uninferable
|
||||
def any(self): return uninferable
|
||||
def argmax(self): return uninferable
|
||||
def argmin(self): return uninferable
|
||||
def argsort(self): return uninferable
|
||||
def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
|
||||
def base(self): return uninferable
|
||||
def byteswap(self): return uninferable
|
||||
def choose(self): return uninferable
|
||||
def clip(self): return uninferable
|
||||
def compress(self): return uninferable
|
||||
def conj(self): return uninferable
|
||||
def conjugate(self): return uninferable
|
||||
def copy(self): return uninferable
|
||||
def cumprod(self): return uninferable
|
||||
def cumsum(self): return uninferable
|
||||
def data(self): return uninferable
|
||||
def diagonal(self): return uninferable
|
||||
def dtype(self): return uninferable
|
||||
def dump(self): return uninferable
|
||||
def dumps(self): return uninferable
|
||||
def fill(self): return uninferable
|
||||
def flags(self): return uninferable
|
||||
def flat(self): return uninferable
|
||||
def flatten(self): return uninferable
|
||||
def getfield(self): return uninferable
|
||||
def imag(self): return uninferable
|
||||
def item(self): return uninferable
|
||||
def itemset(self): return uninferable
|
||||
def itemsize(self): return uninferable
|
||||
def max(self): return uninferable
|
||||
def mean(self): return uninferable
|
||||
def min(self): return uninferable
|
||||
def nbytes(self): return uninferable
|
||||
def ndim(self): return uninferable
|
||||
def newbyteorder(self): return uninferable
|
||||
def nonzero(self): return uninferable
|
||||
def prod(self): return uninferable
|
||||
def ptp(self): return uninferable
|
||||
def put(self): return uninferable
|
||||
def ravel(self): return uninferable
|
||||
def real(self): return uninferable
|
||||
def repeat(self): return uninferable
|
||||
def reshape(self): return uninferable
|
||||
def resize(self): return uninferable
|
||||
def round(self): return uninferable
|
||||
def searchsorted(self): return uninferable
|
||||
def setfield(self): return uninferable
|
||||
def setflags(self): return uninferable
|
||||
def shape(self): return uninferable
|
||||
def size(self): return uninferable
|
||||
def sort(self): return uninferable
|
||||
def squeeze(self): return uninferable
|
||||
def std(self): return uninferable
|
||||
def strides(self): return uninferable
|
||||
def sum(self): return uninferable
|
||||
def swapaxes(self): return uninferable
|
||||
def take(self): return uninferable
|
||||
def tobytes(self): return uninferable
|
||||
def tofile(self): return uninferable
|
||||
def tolist(self): return uninferable
|
||||
def tostring(self): return uninferable
|
||||
def trace(self): return uninferable
|
||||
def transpose(self): return uninferable
|
||||
def var(self): return uninferable
|
||||
def view(self): return uninferable
|
||||
|
||||
|
||||
class dtype(object):
|
||||
def __init__(self, obj, align=False, copy=False):
|
||||
self.alignment = None
|
||||
self.base = None
|
||||
self.byteorder = None
|
||||
self.char = None
|
||||
self.descr = None
|
||||
self.fields = None
|
||||
self.flags = None
|
||||
self.hasobject = None
|
||||
self.isalignedstruct = None
|
||||
self.isbuiltin = None
|
||||
self.isnative = None
|
||||
self.itemsize = None
|
||||
self.kind = None
|
||||
self.metadata = None
|
||||
self.name = None
|
||||
self.names = None
|
||||
self.num = None
|
||||
self.shape = None
|
||||
self.str = None
|
||||
self.subdtype = None
|
||||
self.type = None
|
||||
|
||||
def newbyteorder(self, new_order='S'): return uninferable
|
||||
def __neg__(self): return uninferable
|
||||
|
||||
class busdaycalendar(object):
|
||||
def __init__(self, weekmask='1111100', holidays=None):
|
||||
self.holidays = None
|
||||
self.weekmask = None
|
||||
|
||||
class flexible(generic): pass
|
||||
class bool_(generic): pass
|
||||
class number(generic):
|
||||
def __neg__(self): return uninferable
|
||||
class datetime64(generic):
|
||||
def __init__(self, nb, unit=None): pass
|
||||
|
||||
|
||||
class void(flexible):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.base = None
|
||||
self.dtype = None
|
||||
self.flags = None
|
||||
def getfield(self): return uninferable
|
||||
def setfield(self): return uninferable
|
||||
|
||||
|
||||
class character(flexible): pass
|
||||
|
||||
|
||||
class integer(number):
|
||||
def __init__(self, value):
|
||||
self.denominator = None
|
||||
self.numerator = None
|
||||
|
||||
|
||||
class inexact(number): pass
|
||||
|
||||
|
||||
class str_(str, character):
|
||||
def maketrans(self, x, y=None, z=None): return uninferable
|
||||
|
||||
|
||||
class bytes_(bytes, character):
|
||||
def fromhex(self, string): return uninferable
|
||||
def maketrans(self, frm, to): return uninferable
|
||||
|
||||
|
||||
class signedinteger(integer): pass
|
||||
|
||||
|
||||
class unsignedinteger(integer): pass
|
||||
|
||||
|
||||
class complexfloating(inexact): pass
|
||||
|
||||
|
||||
class floating(inexact): pass
|
||||
|
||||
|
||||
class float64(floating, float):
|
||||
def fromhex(self, string): return uninferable
|
||||
|
||||
|
||||
class uint64(unsignedinteger): pass
|
||||
class complex64(complexfloating): pass
|
||||
class int16(signedinteger): pass
|
||||
class float96(floating): pass
|
||||
class int8(signedinteger): pass
|
||||
class uint32(unsignedinteger): pass
|
||||
class uint8(unsignedinteger): pass
|
||||
class _typedict(dict): pass
|
||||
class complex192(complexfloating): pass
|
||||
class timedelta64(signedinteger):
|
||||
def __init__(self, nb, unit=None): pass
|
||||
class int32(signedinteger): pass
|
||||
class uint16(unsignedinteger): pass
|
||||
class float32(floating): pass
|
||||
class complex128(complexfloating, complex): pass
|
||||
class float16(floating): pass
|
||||
class int64(signedinteger): pass
|
||||
|
||||
buffer_type = memoryview
|
||||
bool8 = bool_
|
||||
byte = int8
|
||||
bytes0 = bytes_
|
||||
cdouble = complex128
|
||||
cfloat = complex128
|
||||
clongdouble = complex192
|
||||
clongfloat = complex192
|
||||
complex_ = complex128
|
||||
csingle = complex64
|
||||
double = float64
|
||||
float_ = float64
|
||||
half = float16
|
||||
int0 = int32
|
||||
int_ = int32
|
||||
intc = int32
|
||||
intp = int32
|
||||
long = int32
|
||||
longcomplex = complex192
|
||||
longdouble = float96
|
||||
longfloat = float96
|
||||
longlong = int64
|
||||
object0 = object_
|
||||
object_ = object_
|
||||
short = int16
|
||||
single = float32
|
||||
singlecomplex = complex64
|
||||
str0 = str_
|
||||
string_ = bytes_
|
||||
ubyte = uint8
|
||||
uint = uint32
|
||||
uint0 = uint32
|
||||
uintc = uint32
|
||||
uintp = uint32
|
||||
ulonglong = uint64
|
||||
unicode = str_
|
||||
unicode_ = str_
|
||||
ushort = uint16
|
||||
void0 = void
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
|
||||
)
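A hedged sanity check of this extender, assuming numpy is installed so astroid can build the real numpy.core.numerictypes module and merge the stub above into it:

import astroid

node = astroid.extract_node("import numpy.core.numerictypes as nt; nt.uint16")
inferred = next(node.infer())
print(inferred.name)  # expected: "uint16", the stub class injected by the transform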
|
||||
@@ -0,0 +1,147 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.umath module."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def numpy_core_umath_transform():
|
||||
ufunc_optional_keyword_arguments = (
|
||||
"""out=None, where=True, casting='same_kind', order='K', """
|
||||
"""dtype=None, subok=True"""
|
||||
)
|
||||
return astroid.parse(
|
||||
"""
|
||||
class FakeUfunc:
|
||||
def __init__(self):
|
||||
self.__doc__ = str()
|
||||
self.__name__ = str()
|
||||
self.nin = 0
|
||||
self.nout = 0
|
||||
self.nargs = 0
|
||||
self.ntypes = 0
|
||||
self.types = None
|
||||
self.identity = None
|
||||
self.signature = None
|
||||
|
||||
@classmethod
|
||||
def reduce(cls, a, axis=None, dtype=None, out=None):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
@classmethod
|
||||
def accumulate(cls, array, axis=None, dtype=None, out=None):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
@classmethod
|
||||
def reduceat(cls, a, indices, axis=None, dtype=None, out=None):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
@classmethod
|
||||
def outer(cls, A, B, **kwargs):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
@classmethod
|
||||
def at(cls, a, indices, b=None):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
class FakeUfuncOneArg(FakeUfunc):
|
||||
def __call__(self, x, {opt_args:s}):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
class FakeUfuncOneArgBis(FakeUfunc):
|
||||
def __call__(self, x, {opt_args:s}):
|
||||
return numpy.ndarray([0, 0]), numpy.ndarray([0, 0])
|
||||
|
||||
class FakeUfuncTwoArgs(FakeUfunc):
|
||||
def __call__(self, x1, x2, {opt_args:s}):
|
||||
return numpy.ndarray([0, 0])
|
||||
|
||||
# Constants
|
||||
e = 2.718281828459045
|
||||
euler_gamma = 0.5772156649015329
|
||||
|
||||
# One arg functions with optional kwargs
|
||||
arccos = FakeUfuncOneArg()
|
||||
arccosh = FakeUfuncOneArg()
|
||||
arcsin = FakeUfuncOneArg()
|
||||
arcsinh = FakeUfuncOneArg()
|
||||
arctan = FakeUfuncOneArg()
|
||||
arctanh = FakeUfuncOneArg()
|
||||
cbrt = FakeUfuncOneArg()
|
||||
conj = FakeUfuncOneArg()
|
||||
conjugate = FakeUfuncOneArg()
|
||||
cosh = FakeUfuncOneArg()
|
||||
deg2rad = FakeUfuncOneArg()
|
||||
exp2 = FakeUfuncOneArg()
|
||||
expm1 = FakeUfuncOneArg()
|
||||
fabs = FakeUfuncOneArg()
|
||||
frexp = FakeUfuncOneArgBis()
|
||||
isfinite = FakeUfuncOneArg()
|
||||
isinf = FakeUfuncOneArg()
|
||||
log = FakeUfuncOneArg()
|
||||
log1p = FakeUfuncOneArg()
|
||||
log2 = FakeUfuncOneArg()
|
||||
logical_not = FakeUfuncOneArg()
|
||||
modf = FakeUfuncOneArgBis()
|
||||
negative = FakeUfuncOneArg()
|
||||
positive = FakeUfuncOneArg()
|
||||
rad2deg = FakeUfuncOneArg()
|
||||
reciprocal = FakeUfuncOneArg()
|
||||
rint = FakeUfuncOneArg()
|
||||
sign = FakeUfuncOneArg()
|
||||
signbit = FakeUfuncOneArg()
|
||||
sinh = FakeUfuncOneArg()
|
||||
spacing = FakeUfuncOneArg()
|
||||
square = FakeUfuncOneArg()
|
||||
tan = FakeUfuncOneArg()
|
||||
tanh = FakeUfuncOneArg()
|
||||
trunc = FakeUfuncOneArg()
|
||||
|
||||
# Two args functions with optional kwargs
|
||||
bitwise_and = FakeUfuncTwoArgs()
|
||||
bitwise_or = FakeUfuncTwoArgs()
|
||||
bitwise_xor = FakeUfuncTwoArgs()
|
||||
copysign = FakeUfuncTwoArgs()
|
||||
divide = FakeUfuncTwoArgs()
|
||||
divmod = FakeUfuncTwoArgs()
|
||||
equal = FakeUfuncTwoArgs()
|
||||
float_power = FakeUfuncTwoArgs()
|
||||
floor_divide = FakeUfuncTwoArgs()
|
||||
fmax = FakeUfuncTwoArgs()
|
||||
fmin = FakeUfuncTwoArgs()
|
||||
fmod = FakeUfuncTwoArgs()
|
||||
greater = FakeUfuncTwoArgs()
|
||||
gcd = FakeUfuncTwoArgs()
|
||||
hypot = FakeUfuncTwoArgs()
|
||||
heaviside = FakeUfuncTwoArgs()
|
||||
lcm = FakeUfuncTwoArgs()
|
||||
ldexp = FakeUfuncTwoArgs()
|
||||
left_shift = FakeUfuncTwoArgs()
|
||||
less = FakeUfuncTwoArgs()
|
||||
logaddexp = FakeUfuncTwoArgs()
|
||||
logaddexp2 = FakeUfuncTwoArgs()
|
||||
logical_and = FakeUfuncTwoArgs()
|
||||
logical_or = FakeUfuncTwoArgs()
|
||||
logical_xor = FakeUfuncTwoArgs()
|
||||
maximum = FakeUfuncTwoArgs()
|
||||
minimum = FakeUfuncTwoArgs()
|
||||
nextafter = FakeUfuncTwoArgs()
|
||||
not_equal = FakeUfuncTwoArgs()
|
||||
power = FakeUfuncTwoArgs()
|
||||
remainder = FakeUfuncTwoArgs()
|
||||
right_shift = FakeUfuncTwoArgs()
|
||||
subtract = FakeUfuncTwoArgs()
|
||||
true_divide = FakeUfuncTwoArgs()
|
||||
""".format(
|
||||
opt_args=ufunc_optional_keyword_arguments
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
|
||||
)
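A minimal sketch of what this gives inference clients, again assuming numpy is importable:

import astroid

node = astroid.extract_node("import numpy.core.umath as umath; umath.e")
print(next(node.infer()).value)  # 2.718281828459045, taken from the fake module above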
|
||||
@@ -0,0 +1,153 @@
|
||||
# Copyright (c) 2015-2016, 2018-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2017-2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy ndarray class."""
|
||||
|
||||
import functools
|
||||
import astroid
|
||||
|
||||
|
||||
def infer_numpy_ndarray(node, context=None):
|
||||
ndarray = """
|
||||
class ndarray(object):
|
||||
def __init__(self, shape, dtype=float, buffer=None, offset=0,
|
||||
strides=None, order=None):
|
||||
self.T = None
|
||||
self.base = None
|
||||
self.ctypes = None
|
||||
self.data = None
|
||||
self.dtype = None
|
||||
self.flags = None
|
||||
self.flat = None
|
||||
self.imag = np.ndarray([0, 0])
|
||||
self.itemsize = None
|
||||
self.nbytes = None
|
||||
self.ndim = None
|
||||
self.real = np.ndarray([0, 0])
|
||||
self.shape = numpy.ndarray([0, 0])
|
||||
self.size = None
|
||||
self.strides = None
|
||||
|
||||
def __abs__(self): return numpy.ndarray([0, 0])
|
||||
def __add__(self, value): return numpy.ndarray([0, 0])
|
||||
def __and__(self, value): return numpy.ndarray([0, 0])
|
||||
def __array__(self, dtype=None): return numpy.ndarray([0, 0])
|
||||
def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
|
||||
def __contains__(self, key): return True
|
||||
def __copy__(self): return numpy.ndarray([0, 0])
|
||||
def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
|
||||
def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
|
||||
def __eq__(self, value): return numpy.ndarray([0, 0])
|
||||
def __float__(self): return 0.
|
||||
def __floordiv__(self): return numpy.ndarray([0, 0])
|
||||
def __ge__(self, value): return numpy.ndarray([0, 0])
|
||||
def __getitem__(self, key): return uninferable
|
||||
def __gt__(self, value): return numpy.ndarray([0, 0])
|
||||
def __iadd__(self, value): return numpy.ndarray([0, 0])
|
||||
def __iand__(self, value): return numpy.ndarray([0, 0])
|
||||
def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
|
||||
def __ilshift__(self, value): return numpy.ndarray([0, 0])
|
||||
def __imod__(self, value): return numpy.ndarray([0, 0])
|
||||
def __imul__(self, value): return numpy.ndarray([0, 0])
|
||||
def __int__(self): return 0
|
||||
def __invert__(self): return numpy.ndarray([0, 0])
|
||||
def __ior__(self, value): return numpy.ndarray([0, 0])
|
||||
def __ipow__(self, value): return numpy.ndarray([0, 0])
|
||||
def __irshift__(self, value): return numpy.ndarray([0, 0])
|
||||
def __isub__(self, value): return numpy.ndarray([0, 0])
|
||||
def __itruediv__(self, value): return numpy.ndarray([0, 0])
|
||||
def __ixor__(self, value): return numpy.ndarray([0, 0])
|
||||
def __le__(self, value): return numpy.ndarray([0, 0])
|
||||
def __len__(self): return 1
|
||||
def __lshift__(self, value): return numpy.ndarray([0, 0])
|
||||
def __lt__(self, value): return numpy.ndarray([0, 0])
|
||||
def __matmul__(self, value): return numpy.ndarray([0, 0])
|
||||
def __mod__(self, value): return numpy.ndarray([0, 0])
|
||||
def __mul__(self, value): return numpy.ndarray([0, 0])
|
||||
def __ne__(self, value): return numpy.ndarray([0, 0])
|
||||
def __neg__(self): return numpy.ndarray([0, 0])
|
||||
def __or__(self): return numpy.ndarray([0, 0])
|
||||
def __pos__(self): return numpy.ndarray([0, 0])
|
||||
def __pow__(self): return numpy.ndarray([0, 0])
|
||||
def __repr__(self): return str()
|
||||
def __rshift__(self): return numpy.ndarray([0, 0])
|
||||
def __setitem__(self, key, value): return uninferable
|
||||
def __str__(self): return str()
|
||||
def __sub__(self, value): return numpy.ndarray([0, 0])
|
||||
def __truediv__(self, value): return numpy.ndarray([0, 0])
|
||||
def __xor__(self, value): return numpy.ndarray([0, 0])
|
||||
def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
|
||||
def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
|
||||
def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
|
||||
def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
|
||||
def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
|
||||
def byteswap(self, inplace=False): return np.ndarray([0, 0])
|
||||
def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
|
||||
def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
|
||||
def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
|
||||
def conj(self): return np.ndarray([0, 0])
|
||||
def conjugate(self): return np.ndarray([0, 0])
|
||||
def copy(self, order='C'): return np.ndarray([0, 0])
|
||||
def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
|
||||
def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
|
||||
def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
|
||||
def dot(self, b, out=None): return np.ndarray([0, 0])
|
||||
def dump(self, file): return None
|
||||
def dumps(self): return str()
|
||||
def fill(self, value): return None
|
||||
def flatten(self, order='C'): return np.ndarray([0, 0])
|
||||
def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
|
||||
def item(self, *args): return uninferable
|
||||
def itemset(self, *args): return None
|
||||
def max(self, axis=None, out=None): return np.ndarray([0, 0])
|
||||
def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
|
||||
def nonzero(self): return (1,)
|
||||
def partition(self, kth, axis=-1, kind='introselect', order=None): return None
|
||||
def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
|
||||
def put(self, indices, values, mode='raise'): return None
|
||||
def ravel(self, order='C'): return np.ndarray([0, 0])
|
||||
def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
|
||||
def reshape(self, shape, order='C'): return np.ndarray([0, 0])
|
||||
def resize(self, new_shape, refcheck=True): return None
|
||||
def round(self, decimals=0, out=None): return np.ndarray([0, 0])
|
||||
def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
|
||||
def setfield(self, val, dtype, offset=0): return None
|
||||
def setflags(self, write=None, align=None, uic=None): return None
|
||||
def sort(self, axis=-1, kind='quicksort', order=None): return None
|
||||
def squeeze(self, axis=None): return np.ndarray([0, 0])
|
||||
def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
|
||||
def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
|
||||
def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
|
||||
def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
|
||||
def tobytes(self, order='C'): return b''
|
||||
def tofile(self, fid, sep="", format="%s"): return None
|
||||
def tolist(self, ): return []
|
||||
def tostring(self, order='C'): return b''
|
||||
def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
|
||||
def transpose(self, *axes): return np.ndarray([0, 0])
|
||||
def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
|
||||
def view(self, dtype=None, type=None): return np.ndarray([0, 0])
|
||||
"""
|
||||
node = astroid.extract_node(ndarray)
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
def _looks_like_numpy_ndarray(node):
|
||||
return isinstance(node, astroid.Attribute) and node.attrname == "ndarray"
|
||||
|
||||
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Attribute,
|
||||
astroid.inference_tip(infer_numpy_ndarray),
|
||||
_looks_like_numpy_ndarray,
|
||||
)
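Since the predicate only checks the attribute name, the tip fires on any `<something>.ndarray` attribute and does not require numpy itself; a hedged check:

import astroid

node = astroid.extract_node("import numpy; numpy.ndarray")
inferred = next(node.infer())
print(inferred.name)  # "ndarray", the stub class built inside infer_numpy_ndarray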
|
||||
@@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
# TODO(hippo91) : correct the functions return types
|
||||
"""Astroid hooks for numpy.random.mtrand module."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def numpy_random_mtrand_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
def beta(a, b, size=None): return uninferable
|
||||
def binomial(n, p, size=None): return uninferable
|
||||
def bytes(length): return uninferable
|
||||
def chisquare(df, size=None): return uninferable
|
||||
def choice(a, size=None, replace=True, p=None): return uninferable
|
||||
def dirichlet(alpha, size=None): return uninferable
|
||||
def exponential(scale=1.0, size=None): return uninferable
|
||||
def f(dfnum, dfden, size=None): return uninferable
|
||||
def gamma(shape, scale=1.0, size=None): return uninferable
|
||||
def geometric(p, size=None): return uninferable
|
||||
def get_state(): return uninferable
|
||||
def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
|
||||
def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
|
||||
def laplace(loc=0.0, scale=1.0, size=None): return uninferable
|
||||
def logistic(loc=0.0, scale=1.0, size=None): return uninferable
|
||||
def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
|
||||
def logseries(p, size=None): return uninferable
|
||||
def multinomial(n, pvals, size=None): return uninferable
|
||||
def multivariate_normal(mean, cov, size=None): return uninferable
|
||||
def negative_binomial(n, p, size=None): return uninferable
|
||||
def noncentral_chisquare(df, nonc, size=None): return uninferable
|
||||
def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
|
||||
def normal(loc=0.0, scale=1.0, size=None): return uninferable
|
||||
def pareto(a, size=None): return uninferable
|
||||
def permutation(x): return uninferable
|
||||
def poisson(lam=1.0, size=None): return uninferable
|
||||
def power(a, size=None): return uninferable
|
||||
def rand(*args): return uninferable
|
||||
def randint(low, high=None, size=None, dtype='l'):
|
||||
import numpy
|
||||
return numpy.ndarray((1,1))
|
||||
def randn(*args): return uninferable
|
||||
def random_integers(low, high=None, size=None): return uninferable
|
||||
def random_sample(size=None): return uninferable
|
||||
def rayleigh(scale=1.0, size=None): return uninferable
|
||||
def seed(seed=None): return uninferable
|
||||
def set_state(state): return uninferable
|
||||
def shuffle(x): return uninferable
|
||||
def standard_cauchy(size=None): return uninferable
|
||||
def standard_exponential(size=None): return uninferable
|
||||
def standard_gamma(shape, size=None): return uninferable
|
||||
def standard_normal(size=None): return uninferable
|
||||
def standard_t(df, size=None): return uninferable
|
||||
def triangular(left, mode, right, size=None): return uninferable
|
||||
def uniform(low=0.0, high=1.0, size=None): return uninferable
|
||||
def vonmises(mu, kappa, size=None): return uninferable
|
||||
def wald(mean, scale, size=None): return uninferable
|
||||
def weibull(a, size=None): return uninferable
|
||||
def zipf(a, size=None): return uninferable
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
|
||||
)
|
||||
@@ -0,0 +1,65 @@
|
||||
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Different utilities for the numpy brains"""
|
||||
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def infer_numpy_member(src, node, context=None):
|
||||
node = astroid.extract_node(src)
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
def _is_a_numpy_module(node: astroid.node_classes.Name) -> bool:
|
||||
"""
|
||||
Returns True if the node is a representation of a numpy module.
|
||||
|
||||
For example in :
|
||||
import numpy as np
|
||||
x = np.linspace(1, 2)
|
||||
The node <Name.np> is a representation of the numpy module.
|
||||
|
||||
:param node: node to test
|
||||
:return: True if the node is a representation of the numpy module.
|
||||
"""
|
||||
module_nickname = node.name
|
||||
potential_import_target = [
|
||||
x for x in node.lookup(module_nickname)[1] if isinstance(x, astroid.Import)
|
||||
]
|
||||
for target in potential_import_target:
|
||||
if ("numpy", module_nickname) in target.names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def looks_like_numpy_member(
|
||||
member_name: str, node: astroid.node_classes.NodeNG
|
||||
) -> bool:
|
||||
"""
|
||||
Returns True if the node is a member of numpy whose
|
||||
name is member_name.
|
||||
|
||||
:param member_name: name of the member
|
||||
:param node: node to test
|
||||
:return: True if the node is a member of numpy
|
||||
"""
|
||||
if (
|
||||
isinstance(node, astroid.Attribute)
|
||||
and node.attrname == member_name
|
||||
and isinstance(node.expr, astroid.Name)
|
||||
and _is_a_numpy_module(node.expr)
|
||||
):
|
||||
return True
|
||||
if (
|
||||
isinstance(node, astroid.Name)
|
||||
and node.name == member_name
|
||||
and node.root().name.startswith("numpy")
|
||||
):
|
||||
return True
|
||||
return False
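These helpers are meant to be wired up by the individual numpy brain modules via MANAGER.register_transform; a hypothetical wiring for a single member (the import path and the linspace stub below are illustrative assumptions, not taken from this file):

import functools

import astroid
# assumed location of the helpers above; adjust if the brain module is named differently
from astroid.brain.brain_numpy_utils import infer_numpy_member, looks_like_numpy_member

# hypothetical stub source; the real brain modules keep a mapping of such snippets
LINSPACE_SRC = "def linspace(start, stop, num=50): return numpy.ndarray([0, 0])"

astroid.MANAGER.register_transform(
    astroid.Attribute,
    astroid.inference_tip(functools.partial(infer_numpy_member, LINSPACE_SRC)),
    functools.partial(looks_like_numpy_member, "linspace"),
)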
|
||||
@@ -0,0 +1,75 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
import astroid
|
||||
from astroid import parse
|
||||
from astroid import inference_tip
|
||||
from astroid import register_module_extender
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
def pkg_resources_transform():
|
||||
return parse(
|
||||
"""
|
||||
def require(*requirements):
|
||||
return pkg_resources.working_set.require(*requirements)
|
||||
|
||||
def run_script(requires, script_name):
|
||||
return pkg_resources.working_set.run_script(requires, script_name)
|
||||
|
||||
def iter_entry_points(group, name=None):
|
||||
return pkg_resources.working_set.iter_entry_points(group, name)
|
||||
|
||||
def resource_exists(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).has_resource(resource_name)
|
||||
|
||||
def resource_isdir(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).resource_isdir(
|
||||
resource_name)
|
||||
|
||||
def resource_filename(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_filename(
|
||||
self, resource_name)
|
||||
|
||||
def resource_stream(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_stream(
|
||||
self, resource_name)
|
||||
|
||||
def resource_string(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_string(
|
||||
self, resource_name)
|
||||
|
||||
def resource_listdir(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).resource_listdir(
|
||||
resource_name)
|
||||
|
||||
def extraction_error():
|
||||
pass
|
||||
|
||||
def get_cache_path(archive_name, names=()):
|
||||
extract_path = self.extraction_path or get_default_cache()
|
||||
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
|
||||
return target_path
|
||||
|
||||
def postprocess(tempname, filename):
|
||||
pass
|
||||
|
||||
def set_extraction_path(path):
|
||||
pass
|
||||
|
||||
def cleanup_resources(force=False):
|
||||
pass
|
||||
|
||||
def get_distribution(dist):
|
||||
return Distribution(dist)
|
||||
|
||||
_namespace_packages = {}
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "pkg_resources", pkg_resources_transform)
|
||||
@@ -0,0 +1,88 @@
|
||||
# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Jeff Quast <contact@jeffquast.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for pytest."""
|
||||
from __future__ import absolute_import
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pytest_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(
|
||||
"""
|
||||
|
||||
try:
|
||||
import _pytest.mark
|
||||
import _pytest.recwarn
|
||||
import _pytest.runner
|
||||
import _pytest.python
|
||||
import _pytest.skipping
|
||||
import _pytest.assertion
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
deprecated_call = _pytest.recwarn.deprecated_call
|
||||
warns = _pytest.recwarn.warns
|
||||
|
||||
exit = _pytest.runner.exit
|
||||
fail = _pytest.runner.fail
|
||||
skip = _pytest.runner.skip
|
||||
importorskip = _pytest.runner.importorskip
|
||||
|
||||
xfail = _pytest.skipping.xfail
|
||||
mark = _pytest.mark.MarkGenerator()
|
||||
raises = _pytest.python.raises
|
||||
|
||||
# New in pytest 3.0
|
||||
try:
|
||||
approx = _pytest.python.approx
|
||||
register_assert_rewrite = _pytest.assertion.register_assert_rewrite
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
# Moved in pytest 3.0
|
||||
|
||||
try:
|
||||
import _pytest.freeze_support
|
||||
freeze_includes = _pytest.freeze_support.freeze_includes
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.genscript
|
||||
freeze_includes = _pytest.genscript.freeze_includes
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import _pytest.debugging
|
||||
set_trace = _pytest.debugging.pytestPDB().set_trace
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.pdb
|
||||
set_trace = _pytest.pdb.pytestPDB().set_trace
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import _pytest.fixtures
|
||||
fixture = _pytest.fixtures.fixture
|
||||
yield_fixture = _pytest.fixtures.yield_fixture
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.python
|
||||
fixture = _pytest.python.fixture
|
||||
yield_fixture = _pytest.python.yield_fixture
|
||||
except ImportError:
|
||||
pass
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "pytest", pytest_transform)
|
||||
register_module_extender(MANAGER, "py.test", pytest_transform)
|
||||
83  venv/lib/python3.8/site-packages/astroid/brain/brain_qt.py  Normal file
@@ -0,0 +1,83 @@
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2017 Roy Wright <roy@wright.org>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2019 Antoine Boellinger <aboellinger@hotmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the PyQT library."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import nodes
|
||||
from astroid import parse
|
||||
|
||||
|
||||
def _looks_like_signal(node, signal_name="pyqtSignal"):
|
||||
if "__class__" in node.instance_attrs:
|
||||
try:
|
||||
cls = node.instance_attrs["__class__"][0]
|
||||
return cls.name == signal_name
|
||||
except AttributeError:
|
||||
# return False if the cls does not have a name attribute
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def transform_pyqt_signal(node):
|
||||
module = parse(
|
||||
"""
|
||||
class pyqtSignal(object):
|
||||
def connect(self, slot, type=None, no_receiver_check=False):
|
||||
pass
|
||||
def disconnect(self, slot):
|
||||
pass
|
||||
def emit(self, *args):
|
||||
pass
|
||||
"""
|
||||
)
|
||||
signal_cls = module["pyqtSignal"]
|
||||
node.instance_attrs["emit"] = signal_cls["emit"]
|
||||
node.instance_attrs["disconnect"] = signal_cls["disconnect"]
|
||||
node.instance_attrs["connect"] = signal_cls["connect"]
|
||||
|
||||
|
||||
def transform_pyside_signal(node):
|
||||
module = parse(
|
||||
"""
|
||||
class NotPySideSignal(object):
|
||||
def connect(self, receiver, type=None):
|
||||
pass
|
||||
def disconnect(self, receiver):
|
||||
pass
|
||||
def emit(self, *args):
|
||||
pass
|
||||
"""
|
||||
)
|
||||
signal_cls = module["NotPySideSignal"]
|
||||
node.instance_attrs["connect"] = signal_cls["connect"]
|
||||
node.instance_attrs["disconnect"] = signal_cls["disconnect"]
|
||||
node.instance_attrs["emit"] = signal_cls["emit"]
|
||||
|
||||
|
||||
def pyqt4_qtcore_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(
|
||||
"""
|
||||
|
||||
def SIGNAL(signal_name): pass
|
||||
|
||||
class QObject(object):
|
||||
def emit(self, signal): pass
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "PyQt4.QtCore", pyqt4_qtcore_transform)
|
||||
MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
transform_pyside_signal,
|
||||
lambda node: node.qname() in ("PySide.QtCore.Signal", "PySide2.QtCore.Signal"),
|
||||
)
|
||||
@@ -0,0 +1,75 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import random
|
||||
|
||||
import astroid
|
||||
from astroid import helpers
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
ACCEPTED_ITERABLES_FOR_SAMPLE = (astroid.List, astroid.Set, astroid.Tuple)
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
|
||||
cls = node.__class__
|
||||
other_fields = node._other_fields
|
||||
_astroid_fields = node._astroid_fields
|
||||
init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
|
||||
postinit_params = {param: getattr(node, param) for param in _astroid_fields}
|
||||
if other_fields:
|
||||
init_params.update({param: getattr(node, param) for param in other_fields})
|
||||
new_node = cls(**init_params)
|
||||
if hasattr(node, "postinit") and _astroid_fields:
|
||||
new_node.postinit(**postinit_params)
|
||||
return new_node
|
||||
|
||||
|
||||
def infer_random_sample(node, context=None):
|
||||
if len(node.args) != 2:
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
length = node.args[1]
|
||||
if not isinstance(length, astroid.Const):
|
||||
raise astroid.UseInferenceDefault
|
||||
if not isinstance(length.value, int):
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
inferred_sequence = helpers.safe_infer(node.args[0], context=context)
|
||||
if not inferred_sequence:
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
if length.value > len(inferred_sequence.elts):
|
||||
# In this case, this will raise a ValueError
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
try:
|
||||
elts = random.sample(inferred_sequence.elts, length.value)
|
||||
except ValueError:
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
new_node = astroid.List(
|
||||
lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
|
||||
)
|
||||
new_elts = [
|
||||
_clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
|
||||
for elt in elts
|
||||
]
|
||||
new_node.postinit(new_elts)
|
||||
return iter((new_node,))
|
||||
|
||||
|
||||
def _looks_like_random_sample(node):
|
||||
func = node.func
|
||||
if isinstance(func, astroid.Attribute):
|
||||
return func.attrname == "sample"
|
||||
if isinstance(func, astroid.Name):
|
||||
return func.name == "sample"
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample
|
||||
)
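A minimal sketch of the resulting inference; the chosen elements are random, but the result should be a two-element List node rather than Uninferable:

import astroid

call = astroid.extract_node("import random; random.sample([1, 2, 3], 2)")
inferred = next(call.infer())
print(inferred.as_string())  # e.g. "[3, 1]", a List built by infer_random_sample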
|
||||
36  venv/lib/python3.8/site-packages/astroid/brain/brain_re.py  Normal file
@@ -0,0 +1,36 @@
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import astroid

PY36 = sys.version_info >= (3, 6)

if PY36:
    # Since Python 3.6 there is the RegexFlag enum
    # where every entry will be exposed via updating globals()

    def _re_transform():
        return astroid.parse(
            """
        import sre_compile
        ASCII = sre_compile.SRE_FLAG_ASCII
        IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
        LOCALE = sre_compile.SRE_FLAG_LOCALE
        UNICODE = sre_compile.SRE_FLAG_UNICODE
        MULTILINE = sre_compile.SRE_FLAG_MULTILINE
        DOTALL = sre_compile.SRE_FLAG_DOTALL
        VERBOSE = sre_compile.SRE_FLAG_VERBOSE
        A = ASCII
        I = IGNORECASE
        L = LOCALE
        U = UNICODE
        M = MULTILINE
        S = DOTALL
        X = VERBOSE
        TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
        T = TEMPLATE
        DEBUG = sre_compile.SRE_FLAG_DEBUG
        """
        )

    astroid.register_module_extender(astroid.MANAGER, "re", _re_transform)
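On Python 3.6+ the flags then infer to concrete constants; a hedged example:

import astroid

node = astroid.extract_node("import re; re.IGNORECASE")
print(next(node.infer()).value)  # the integer flag value pulled from sre_compile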
|
||||
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Astroid hooks for responses.
|
||||
|
||||
It might need to be manually updated from the public methods of
|
||||
:class:`responses.RequestsMock`.
|
||||
|
||||
See: https://github.com/getsentry/responses/blob/master/responses.py
|
||||
|
||||
"""
|
||||
import astroid
|
||||
|
||||
|
||||
def responses_funcs():
|
||||
return astroid.parse(
|
||||
"""
|
||||
DELETE = "DELETE"
|
||||
GET = "GET"
|
||||
HEAD = "HEAD"
|
||||
OPTIONS = "OPTIONS"
|
||||
PATCH = "PATCH"
|
||||
POST = "POST"
|
||||
PUT = "PUT"
|
||||
response_callback = None
|
||||
|
||||
def reset():
|
||||
return
|
||||
|
||||
def add(
|
||||
method=None, # method or ``Response``
|
||||
url=None,
|
||||
body="",
|
||||
adding_headers=None,
|
||||
*args,
|
||||
**kwargs
|
||||
):
|
||||
return
|
||||
|
||||
def add_passthru(prefix):
|
||||
return
|
||||
|
||||
def remove(method_or_response=None, url=None):
|
||||
return
|
||||
|
||||
def replace(method_or_response=None, url=None, body="", *args, **kwargs):
|
||||
return
|
||||
|
||||
def add_callback(
|
||||
method, url, callback, match_querystring=False, content_type="text/plain"
|
||||
):
|
||||
return
|
||||
|
||||
calls = []
|
||||
|
||||
def __enter__():
|
||||
return
|
||||
|
||||
def __exit__(type, value, traceback):
|
||||
success = type is None
|
||||
return success
|
||||
|
||||
def activate(func):
|
||||
return func
|
||||
|
||||
def start():
|
||||
return
|
||||
|
||||
def stop(allow_assert=True):
|
||||
return
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "responses", responses_funcs)
|
||||
@@ -0,0 +1,89 @@
|
||||
# Copyright (c) 2019 Valentin Valls <valentin.valls@esrf.fr>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for scipy.signal module."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def scipy_signal():
|
||||
return astroid.parse(
|
||||
"""
|
||||
# different functions defined in scipy.signals
|
||||
|
||||
def barthann(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def bartlett(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def blackman(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def blackmanharris(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def bohman(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def boxcar(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def chebwin(M, at, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def cosine(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def exponential(M, center=None, tau=1.0, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def flattop(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def gaussian(M, std, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def general_gaussian(M, p, sig, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def hamming(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def hann(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def hanning(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def impulse2(system, X0=None, T=None, N=None, **kwargs):
|
||||
return numpy.ndarray([0]), numpy.ndarray([0])
|
||||
|
||||
def kaiser(M, beta, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def nuttall(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def parzen(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def slepian(M, width, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def step2(system, X0=None, T=None, N=None, **kwargs):
|
||||
return numpy.ndarray([0]), numpy.ndarray([0])
|
||||
|
||||
def triang(M, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
|
||||
def tukey(M, alpha=0.5, sym=True):
|
||||
return numpy.ndarray([0])
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "scipy.signal", scipy_signal)
|
||||
201  venv/lib/python3.8/site-packages/astroid/brain/brain_six.py  Normal file
@@ -0,0 +1,201 @@
|
||||
# Copyright (c) 2014-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for six module."""
|
||||
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import (
|
||||
AstroidBuildingError,
|
||||
InferenceError,
|
||||
AttributeInferenceError,
|
||||
)
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
SIX_ADD_METACLASS = "six.add_metaclass"
|
||||
|
||||
|
||||
def _indent(text, prefix, predicate=None):
|
||||
"""Adds 'prefix' to the beginning of selected lines in 'text'.
|
||||
|
||||
If 'predicate' is provided, 'prefix' will only be added to the lines
|
||||
where 'predicate(line)' is True. If 'predicate' is not provided,
|
||||
it will default to adding 'prefix' to all non-empty lines that do not
|
||||
consist solely of whitespace characters.
|
||||
"""
|
||||
if predicate is None:
|
||||
predicate = lambda line: line.strip()
|
||||
|
||||
def prefixed_lines():
|
||||
for line in text.splitlines(True):
|
||||
yield prefix + line if predicate(line) else line
|
||||
|
||||
return "".join(prefixed_lines())
|
||||
|
||||
|
||||
_IMPORTS = """
|
||||
import _io
|
||||
cStringIO = _io.StringIO
|
||||
filter = filter
|
||||
from itertools import filterfalse
|
||||
input = input
|
||||
from sys import intern
|
||||
map = map
|
||||
range = range
|
||||
from importlib import reload
|
||||
reload_module = lambda module: reload(module)
|
||||
from functools import reduce
|
||||
from shlex import quote as shlex_quote
|
||||
from io import StringIO
|
||||
from collections import UserDict, UserList, UserString
|
||||
xrange = range
|
||||
zip = zip
|
||||
from itertools import zip_longest
|
||||
import builtins
|
||||
import configparser
|
||||
import copyreg
|
||||
import _dummy_thread
|
||||
import http.cookiejar as http_cookiejar
|
||||
import http.cookies as http_cookies
|
||||
import html.entities as html_entities
|
||||
import html.parser as html_parser
|
||||
import http.client as http_client
|
||||
import http.server as http_server
|
||||
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
|
||||
import pickle as cPickle
|
||||
import queue
|
||||
import reprlib
|
||||
import socketserver
|
||||
import _thread
|
||||
import winreg
|
||||
import xmlrpc.server as xmlrpc_server
|
||||
import xmlrpc.client as xmlrpc_client
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import email.mime.multipart as email_mime_multipart
|
||||
import email.mime.nonmultipart as email_mime_nonmultipart
|
||||
import email.mime.text as email_mime_text
|
||||
import email.mime.base as email_mime_base
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
import tkinter
|
||||
import tkinter.dialog as tkinter_dialog
|
||||
import tkinter.filedialog as tkinter_filedialog
|
||||
import tkinter.scrolledtext as tkinter_scrolledtext
|
||||
import tkinter.simpledialog as tkinder_simpledialog
|
||||
import tkinter.tix as tkinter_tix
|
||||
import tkinter.ttk as tkinter_ttk
|
||||
import tkinter.constants as tkinter_constants
|
||||
import tkinter.dnd as tkinter_dnd
|
||||
import tkinter.colorchooser as tkinter_colorchooser
|
||||
import tkinter.commondialog as tkinter_commondialog
|
||||
import tkinter.filedialog as tkinter_tkfiledialog
|
||||
import tkinter.font as tkinter_font
|
||||
import tkinter.messagebox as tkinter_messagebox
|
||||
import urllib
|
||||
import urllib.request as urllib_request
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
"""
|
||||
|
||||
|
||||
def six_moves_transform():
|
||||
code = dedent(
|
||||
"""
|
||||
class Moves(object):
|
||||
{}
|
||||
moves = Moves()
|
||||
"""
|
||||
).format(_indent(_IMPORTS, " "))
|
||||
module = AstroidBuilder(MANAGER).string_build(code)
|
||||
module.name = "six.moves"
|
||||
return module
|
||||
|
||||
|
||||
def _six_fail_hook(modname):
|
||||
"""Fix six.moves imports due to the dynamic nature of this
|
||||
class.
|
||||
|
||||
Construct a pseudo-module which contains all the necessary imports
|
||||
for six
|
||||
|
||||
:param modname: Name of failed module
|
||||
:type modname: str
|
||||
|
||||
:return: An astroid module
|
||||
:rtype: nodes.Module
|
||||
"""
|
||||
|
||||
attribute_of = modname != "six.moves" and modname.startswith("six.moves")
|
||||
if modname != "six.moves" and not attribute_of:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
|
||||
module.name = "six.moves"
|
||||
if attribute_of:
|
||||
# Facilitate import of submodules in Moves
|
||||
start_index = len(module.name)
|
||||
attribute = modname[start_index:].lstrip(".").replace(".", "_")
|
||||
try:
|
||||
import_attr = module.getattr(attribute)[0]
|
||||
except AttributeInferenceError:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
if isinstance(import_attr, nodes.Import):
|
||||
submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
|
||||
return submodule
|
||||
# Let dummy submodule imports pass through
|
||||
# This will cause an Uninferable result, which is okay
|
||||
return module
|
||||
|
||||
|
||||
def _looks_like_decorated_with_six_add_metaclass(node):
|
||||
if not node.decorators:
|
||||
return False
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, nodes.Call):
|
||||
continue
|
||||
if decorator.func.as_string() == SIX_ADD_METACLASS:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def transform_six_add_metaclass(node):
|
||||
"""Check if the given class node is decorated with *six.add_metaclass*
|
||||
|
||||
If so, inject its argument as the metaclass of the underlying class.
|
||||
"""
|
||||
if not node.decorators:
|
||||
return
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, nodes.Call):
|
||||
continue
|
||||
|
||||
try:
|
||||
func = next(decorator.func.infer())
|
||||
except InferenceError:
|
||||
continue
|
||||
if func.qname() == SIX_ADD_METACLASS and decorator.args:
|
||||
metaclass = decorator.args[0]
|
||||
node._metaclass = metaclass
|
||||
return node
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "six", six_moves_transform)
|
||||
register_module_extender(
|
||||
MANAGER, "requests.packages.urllib3.packages.six", six_moves_transform
|
||||
)
|
||||
MANAGER.register_failed_import_hook(_six_fail_hook)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
transform_six_add_metaclass,
|
||||
_looks_like_decorated_with_six_add_metaclass,
|
||||
)
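A hedged example of the add_metaclass transform, assuming six is installed so the decorator resolves to six.add_metaclass:

import astroid

cls = astroid.extract_node("""
import abc
import six

@six.add_metaclass(abc.ABCMeta)
class Base(object):
    pass
""")
print(cls.metaclass())  # expected to be the abc.ABCMeta class node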
|
||||
75  venv/lib/python3.8/site-packages/astroid/brain/brain_ssl.py  Normal file
@@ -0,0 +1,75 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2019 Benjamin Elven <25181435+S3ntinelX@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the ssl library."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import nodes
|
||||
from astroid import parse
|
||||
|
||||
|
||||
def ssl_transform():
|
||||
return parse(
|
||||
"""
|
||||
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
|
||||
from _ssl import _SSLContext, MemoryBIO
|
||||
from _ssl import (
|
||||
SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
|
||||
SSLSyscallError, SSLEOFError,
|
||||
)
|
||||
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
|
||||
from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
|
||||
from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
|
||||
try:
|
||||
from _ssl import RAND_egd
|
||||
except ImportError:
|
||||
# LibreSSL does not provide RAND_egd
|
||||
pass
|
||||
from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
|
||||
OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
|
||||
OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
|
||||
OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
|
||||
|
||||
from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
|
||||
ALERT_DESCRIPTION_BAD_RECORD_MAC,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
|
||||
ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
|
||||
ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
|
||||
ALERT_DESCRIPTION_DECRYPT_ERROR,
|
||||
ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
|
||||
ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
|
||||
ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
|
||||
ALERT_DESCRIPTION_INTERNAL_ERROR,
|
||||
ALERT_DESCRIPTION_NO_RENEGOTIATION,
|
||||
ALERT_DESCRIPTION_PROTOCOL_VERSION,
|
||||
ALERT_DESCRIPTION_RECORD_OVERFLOW,
|
||||
ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
|
||||
ALERT_DESCRIPTION_UNKNOWN_CA,
|
||||
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
|
||||
ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
|
||||
ALERT_DESCRIPTION_USER_CANCELLED)
|
||||
from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
|
||||
SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
|
||||
SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
|
||||
from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
|
||||
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
|
||||
from _ssl import _OPENSSL_API_VERSION
|
||||
from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
|
||||
from _ssl import PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, PROTOCOL_TLS_SERVER
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "ssl", ssl_transform)
|
||||
@@ -0,0 +1,146 @@
|
||||
# Copyright (c) 2016-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Peter Talley <peterctalley@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
PY37 = sys.version_info >= (3, 7)
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
|
||||
|
||||
def _subprocess_transform():
|
||||
communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
|
||||
communicate_signature = "def communicate(self, input=None, timeout=None)"
|
||||
if PY37:
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=(), *,
|
||||
encoding=None, errors=None, text=None):
|
||||
pass
|
||||
"""
|
||||
elif PY36:
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=(), *,
|
||||
encoding=None, errors=None):
|
||||
pass
|
||||
"""
|
||||
else:
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=()):
|
||||
pass
|
||||
"""
|
||||
wait_signature = "def wait(self, timeout=None)"
|
||||
ctx_manager = """
|
||||
def __enter__(self): return self
|
||||
def __exit__(self, *args): pass
|
||||
"""
|
||||
py3_args = "args = []"
|
||||
|
||||
if PY37:
|
||||
check_output_signature = """
|
||||
check_output(
|
||||
args, *,
|
||||
stdin=None,
|
||||
stderr=None,
|
||||
shell=False,
|
||||
cwd=None,
|
||||
encoding=None,
|
||||
errors=None,
|
||||
universal_newlines=False,
|
||||
timeout=None,
|
||||
env=None,
|
||||
text=None,
|
||||
restore_signals=True,
|
||||
preexec_fn=None,
|
||||
pass_fds=(),
|
||||
input=None,
|
||||
start_new_session=False
|
||||
):
|
||||
""".strip()
|
||||
else:
|
||||
check_output_signature = """
|
||||
check_output(
|
||||
args, *,
|
||||
stdin=None,
|
||||
stderr=None,
|
||||
shell=False,
|
||||
cwd=None,
|
||||
encoding=None,
|
||||
errors=None,
|
||||
universal_newlines=False,
|
||||
timeout=None,
|
||||
env=None,
|
||||
restore_signals=True,
|
||||
preexec_fn=None,
|
||||
pass_fds=(),
|
||||
input=None,
|
||||
start_new_session=False
|
||||
):
|
||||
""".strip()
|
||||
|
||||
code = textwrap.dedent(
|
||||
"""
|
||||
def %(check_output_signature)s
|
||||
if universal_newlines:
|
||||
return ""
|
||||
return b""
|
||||
|
||||
class Popen(object):
|
||||
returncode = pid = 0
|
||||
stdin = stdout = stderr = file()
|
||||
%(py3_args)s
|
||||
|
||||
%(communicate_signature)s:
|
||||
return %(communicate)r
|
||||
%(wait_signature)s:
|
||||
return self.returncode
|
||||
def poll(self):
|
||||
return self.returncode
|
||||
def send_signal(self, signal):
|
||||
pass
|
||||
def terminate(self):
|
||||
pass
|
||||
def kill(self):
|
||||
pass
|
||||
%(ctx_manager)s
|
||||
"""
|
||||
% {
|
||||
"check_output_signature": check_output_signature,
|
||||
"communicate": communicate,
|
||||
"communicate_signature": communicate_signature,
|
||||
"wait_signature": wait_signature,
|
||||
"ctx_manager": ctx_manager,
|
||||
"py3_args": py3_args,
|
||||
}
|
||||
)
|
||||
|
||||
init_lines = textwrap.dedent(init).splitlines()
|
||||
indented_init = "\n".join(" " * 4 + line for line in init_lines)
|
||||
code += indented_init
|
||||
return astroid.parse(code)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "subprocess", _subprocess_transform)
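A hedged example of what the stub hands to inference:

import astroid

node = astroid.extract_node(
    "import subprocess; subprocess.Popen(['ls']).communicate()"
)
print(next(node.infer()).as_string())  # the (bytes, bytes) pair hard-coded above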
|
||||
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2018-2019 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import astroid


def _thread_transform():
    return astroid.parse(
        """
    class lock(object):
        def acquire(self, blocking=True, timeout=-1):
            return False
        def release(self):
            pass
        def __enter__(self):
            return True
        def __exit__(self, *args):
            pass
        def locked(self):
            return False

    def Lock():
        return lock()
    """
    )


astroid.register_module_extender(astroid.MANAGER, "threading", _thread_transform)
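A quick, hedged check of the stub:

import astroid

node = astroid.extract_node("import threading; threading.Lock().acquire()")
print(next(node.infer()).value)  # False, per the lock.acquire stub above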
|
||||
@@ -0,0 +1,96 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 David Euresti <github@euresti.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
|
||||
"""Astroid hooks for typing.py support."""
|
||||
import typing
|
||||
|
||||
from astroid import (
|
||||
MANAGER,
|
||||
UseInferenceDefault,
|
||||
extract_node,
|
||||
inference_tip,
|
||||
nodes,
|
||||
InferenceError,
|
||||
)
|
||||
|
||||
|
||||
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
|
||||
TYPING_TYPEVARS = {"TypeVar", "NewType"}
|
||||
TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
|
||||
TYPING_TYPE_TEMPLATE = """
|
||||
class Meta(type):
|
||||
def __getitem__(self, item):
|
||||
return self
|
||||
|
||||
@property
|
||||
def __args__(self):
|
||||
return ()
|
||||
|
||||
class {0}(metaclass=Meta):
|
||||
pass
|
||||
"""
|
||||
TYPING_MEMBERS = set(typing.__all__)
|
||||
|
||||
|
||||
def looks_like_typing_typevar_or_newtype(node):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return func.attrname in TYPING_TYPEVARS
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name in TYPING_TYPEVARS
|
||||
return False
|
||||
|
||||
|
||||
def infer_typing_typevar_or_newtype(node, context=None):
|
||||
"""Infer a typing.TypeVar(...) or typing.NewType(...) call"""
|
||||
try:
|
||||
func = next(node.func.infer(context=context))
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
|
||||
if func.qname() not in TYPING_TYPEVARS_QUALIFIED:
|
||||
raise UseInferenceDefault
|
||||
if not node.args:
|
||||
raise UseInferenceDefault
|
||||
|
||||
typename = node.args[0].as_string().strip("'")
|
||||
node = extract_node(TYPING_TYPE_TEMPLATE.format(typename))
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
def _looks_like_typing_subscript(node):
|
||||
"""Try to figure out if a Subscript node *might* be a typing-related subscript"""
|
||||
if isinstance(node, nodes.Name):
|
||||
return node.name in TYPING_MEMBERS
|
||||
elif isinstance(node, nodes.Attribute):
|
||||
return node.attrname in TYPING_MEMBERS
|
||||
elif isinstance(node, nodes.Subscript):
|
||||
return _looks_like_typing_subscript(node.value)
|
||||
return False
|
||||
|
||||
|
||||
def infer_typing_attr(node, context=None):
|
||||
"""Infer a typing.X[...] subscript"""
|
||||
try:
|
||||
value = next(node.value.infer())
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
|
||||
if not value.qname().startswith("typing."):
|
||||
raise UseInferenceDefault
|
||||
|
||||
node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split(".")[-1]))
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call,
|
||||
inference_tip(infer_typing_typevar_or_newtype),
|
||||
looks_like_typing_typevar_or_newtype,
|
||||
)
|
||||
MANAGER.register_transform(
|
||||
nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
|
||||
)
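A hedged example of the TypeVar tip:

import astroid

node = astroid.extract_node("import typing; typing.TypeVar('T')")
inferred = next(node.infer())
print(inferred.name)  # "T", a synthetic class produced from TYPING_TYPE_TEMPLATE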
|
||||
20  venv/lib/python3.8/site-packages/astroid/brain/brain_uuid.py  Normal file
@@ -0,0 +1,20 @@
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

"""Astroid hooks for the UUID module."""


from astroid import MANAGER
from astroid import nodes


def _patch_uuid_class(node):
    # The .int member is patched using __dict__
    node.locals["int"] = [nodes.Const(0, parent=node)]


MANAGER.register_transform(
    nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
)
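A hedged check of the patched attribute:

import astroid

node = astroid.extract_node("import uuid; uuid.uuid4().int")
print(next(node.infer()).value)  # expected: 0, the Const patched in above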