Initial commit
venv/lib/python3.8/site-packages/astroid/__init__.py (new file, 168 lines)
@@ -0,0 +1,168 @@
# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2019 Nick Drozd <nicholasdrozd@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

"""Python Abstract Syntax Tree New Generation

The aim of this module is to provide a common base representation of
python source code for projects such as pychecker, pyreverse,
pylint... Well, actually the development of this library is essentially
governed by pylint's needs.

It extends the classes defined in Python's _ast module with some
additional methods and attributes. Instance attributes are added by a
builder object, which can either generate extended ast (let's call
them astroid ;) by visiting an existing ast tree or by inspecting living
objects. Methods are added by monkey patching ast classes.

Main modules are:

* nodes and scoped_nodes for more information about methods and
  attributes added to different node classes

* the manager contains a high level object to get astroid trees from
  source files and living objects. It maintains a cache of previously
  constructed trees for quick access

* builder contains the class responsible to build astroid trees
"""

import enum
import itertools
import os
import sys

import wrapt


_Context = enum.Enum("Context", "Load Store Del")
Load = _Context.Load
Store = _Context.Store
Del = _Context.Del
del _Context


# pylint: disable=wrong-import-order,wrong-import-position
from .__pkginfo__ import version as __version__

# WARNING: internal imports order matters !

# pylint: disable=redefined-builtin

# make all exception classes accessible from astroid package
from astroid.exceptions import *

# make all node classes accessible from astroid package
from astroid.nodes import *

# trigger extra monkey-patching
from astroid import inference

# more stuff available
from astroid import raw_building
from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
from astroid.node_classes import are_exclusive, unpack_infer
from astroid.scoped_nodes import builtin_lookup
from astroid.builder import parse, extract_node
from astroid.util import Uninferable

# make a manager instance (borg) accessible from astroid package
from astroid.manager import AstroidManager

MANAGER = AstroidManager()
del AstroidManager

# transform utilities (filters and decorator)


# pylint: disable=dangerous-default-value
@wrapt.decorator
def _inference_tip_cached(func, instance, args, kwargs, _cache={}):
    """Cache decorator used for inference tips"""
    node = args[0]
    try:
        return iter(_cache[func, node])
    except KeyError:
        result = func(*args, **kwargs)
        # Need to keep an iterator around
        original, copy = itertools.tee(result)
        _cache[func, node] = list(copy)
        return original


# pylint: enable=dangerous-default-value


def inference_tip(infer_function, raise_on_overwrite=False):
    """Given an instance specific inference function, return a function to be
    given to MANAGER.register_transform to set this inference function.

    :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
        if the inference tip will overwrite another. Used for debugging

    Typical usage

    .. sourcecode:: python

       MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
                                  predicate)

    .. Note::

        Using an inference tip will override
        any previously set inference tip for the given
        node. Use a predicate in the transform to prevent
        excess overwrites.
    """

    def transform(node, infer_function=infer_function):
        if (
            raise_on_overwrite
            and node._explicit_inference is not None
            and node._explicit_inference is not infer_function
        ):
            raise InferenceOverwriteError(
                "Inference already set to {existing_inference}. "
                "Trying to overwrite with {new_inference} for {node}".format(
                    existing_inference=infer_function,
                    new_inference=node._explicit_inference,
                    node=node,
                )
            )
        # pylint: disable=no-value-for-parameter
        node._explicit_inference = _inference_tip_cached(infer_function)
        return node

    return transform


def register_module_extender(manager, module_name, get_extension_mod):
    def transform(node):
        extension_module = get_extension_mod()
        for name, objs in extension_module.locals.items():
            node.locals[name] = objs
            for obj in objs:
                if obj.parent is extension_module:
                    obj.parent = node

    manager.register_transform(Module, transform, lambda n: n.name == module_name)


# load brain plugins
BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), "brain")
if BRAIN_MODULES_DIR not in sys.path:
    # add it to the end of the list so user paths take precedence
    sys.path.append(BRAIN_MODULES_DIR)
# load modules in this directory
for module in os.listdir(BRAIN_MODULES_DIR):
    if module.endswith(".py"):
        __import__(module[:-3])
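To see how the public names re-exported by this __init__.py fit together, here is a minimal usage sketch; the snippet and the source strings it parses are illustrative only and are not part of the committed file:

    import astroid

    # Parse a source string into an astroid Module and infer an expression.
    module = astroid.parse("answer = 6 * 7")
    binop = module.body[0].value      # the BinOp node for 6 * 7
    print(next(binop.infer()))        # inference folds the constants into a Const node (42)

    # extract_node is a convenience wrapper for grabbing a single node.
    node = astroid.extract_node("len('abc')  #@")
    print(next(node.infer()))         # Const(3), if the builtins brain can evaluate the call

Both parse and extract_node go through the MANAGER borg defined above, so the brain plugins loaded at the bottom of the file are registered before user code runs.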
venv/lib/python3.8/site-packages/astroid/__pkginfo__.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2019 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2019 Uilian Ries <uilianries@gmail.com>
# Copyright (c) 2019 Thomas Hisch <t.hisch@gmail.com>
# Copyright (c) 2020 Michael <michael-k@users.noreply.github.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

"""astroid packaging information"""

version = "2.4.0"
numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())

extras_require = {}
install_requires = [
    "lazy_object_proxy==1.4.*",
    "six~=1.12",
    "wrapt~=1.11",
    'typed-ast>=1.4.0,<1.5;implementation_name== "cpython" and python_version<"3.8"',
]

# pylint: disable=redefined-builtin; why license is a builtin anyway?
license = "LGPL"

author = "Python Code Quality Authority"
author_email = "code-quality@python.org"
mailinglist = "mailto://%s" % author_email
web = "https://github.com/PyCQA/astroid"

description = "An abstract syntax tree for Python with inference support."

classifiers = [
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Quality Assurance",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]
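The variables above are plain module-level metadata; a packaging script typically feeds them to setuptools roughly as sketched below (hypothetical, astroid's real setup.py is not part of this commit):

    from setuptools import find_packages, setup

    from astroid.__pkginfo__ import (
        author, author_email, classifiers, description, extras_require,
        install_requires, license, version, web,
    )

    setup(
        name="astroid",
        version=version,
        description=description,
        author=author,
        author_email=author_email,
        url=web,
        license=license,
        classifiers=classifiers,
        packages=find_packages(),
        install_requires=install_requires,
        extras_require=extras_require,
    )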
(25 binary files not shown)
venv/lib/python3.8/site-packages/astroid/_ast.py (new file, 54 lines)
@@ -0,0 +1,54 @@
import ast
from collections import namedtuple
from functools import partial
from typing import Optional
import sys

_ast_py2 = _ast_py3 = None
try:
    import typed_ast.ast3 as _ast_py3
    import typed_ast.ast27 as _ast_py2
except ImportError:
    pass


PY38 = sys.version_info[:2] >= (3, 8)
if PY38:
    # On Python 3.8, typed_ast was merged back into `ast`
    _ast_py3 = ast


FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])


def _get_parser_module(parse_python_two=False, type_comments_support=True):
    if not type_comments_support:
        return ast

    if parse_python_two:
        parser_module = _ast_py2
    else:
        parser_module = _ast_py3
    return parser_module or ast


def _parse(string: str, parse_python_two=False, type_comments=True):
    parse_module = _get_parser_module(
        parse_python_two=parse_python_two, type_comments_support=type_comments
    )
    parse_func = parse_module.parse
    if parse_module is _ast_py3:
        if PY38:
            parse_func = partial(parse_func, type_comments=type_comments)
        if not parse_python_two:
            parse_func = partial(parse_func, feature_version=sys.version_info.minor)
    return parse_func(string)


def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is None:
        return None

    func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
    return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
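A short sketch of how the two helpers above behave (illustrative only; the type-comment strings are invented for the example):

    from astroid._ast import _parse, parse_function_type_comment

    # _parse picks typed_ast when available and falls back to the stdlib ast module.
    tree = _parse("x = []  # type: list")
    print(type(tree).__name__)          # "Module"

    # Function type comments use the "(arg types) -> return type" form.
    func_type = parse_function_type_comment("(int, str) -> bool")
    if func_type is not None:           # None when no type-comment capable parser exists
        print(len(func_type.argtypes), func_type.returns)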
venv/lib/python3.8/site-packages/astroid/arguments.py (new file, 300 lines)
@@ -0,0 +1,300 @@
|
||||
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
from astroid import bases
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
class CallSite:
|
||||
"""Class for understanding arguments passed into a call site
|
||||
|
||||
It needs a call context, which contains the arguments and the
|
||||
keyword arguments that were passed into a given call site.
|
||||
In order to infer what an argument represents, call :meth:`infer_argument`
|
||||
with the corresponding function node and the argument name.
|
||||
|
||||
:param callcontext:
|
||||
An instance of :class:`astroid.context.CallContext`, that holds
|
||||
the arguments for the call site.
|
||||
:param argument_context_map:
|
||||
Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context`
|
||||
:param context:
|
||||
An instance of :class:`astroid.context.Context`.
|
||||
"""
|
||||
|
||||
def __init__(self, callcontext, argument_context_map=None, context=None):
|
||||
if argument_context_map is None:
|
||||
argument_context_map = {}
|
||||
self.argument_context_map = argument_context_map
|
||||
args = callcontext.args
|
||||
keywords = callcontext.keywords
|
||||
self.duplicated_keywords = set()
|
||||
self._unpacked_args = self._unpack_args(args, context=context)
|
||||
self._unpacked_kwargs = self._unpack_keywords(keywords, context=context)
|
||||
|
||||
self.positional_arguments = [
|
||||
arg for arg in self._unpacked_args if arg is not util.Uninferable
|
||||
]
|
||||
self.keyword_arguments = {
|
||||
key: value
|
||||
for key, value in self._unpacked_kwargs.items()
|
||||
if value is not util.Uninferable
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_call(cls, call_node, context=None):
|
||||
"""Get a CallSite object from the given Call node.
|
||||
|
||||
:param context:
|
||||
An instance of :class:`astroid.context.Context` that will be used
|
||||
to force a single inference path.
|
||||
"""
|
||||
|
||||
# Determine the callcontext from the given `context` object if any.
|
||||
context = context or contextmod.InferenceContext()
|
||||
callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
|
||||
return cls(callcontext, context=context)
|
||||
|
||||
def has_invalid_arguments(self):
|
||||
"""Check if in the current CallSite were passed *invalid* arguments
|
||||
|
||||
This can mean multiple things. For instance, if an unpacking
|
||||
of an invalid object was passed, then this method will return True.
|
||||
Other cases can be when the arguments can't be inferred by astroid,
|
||||
for example, by passing objects which aren't known statically.
|
||||
"""
|
||||
return len(self.positional_arguments) != len(self._unpacked_args)
|
||||
|
||||
def has_invalid_keywords(self):
|
||||
"""Check if in the current CallSite were passed *invalid* keyword arguments
|
||||
|
||||
For instance, unpacking a dictionary with integer keys is invalid
(**{1:2}), because the keys must be strings; in that case this
method returns True. It also returns True when objects that
cannot be inferred were passed.
|
||||
"""
|
||||
return len(self.keyword_arguments) != len(self._unpacked_kwargs)
|
||||
|
||||
def _unpack_keywords(self, keywords, context=None):
|
||||
values = {}
|
||||
context = context or contextmod.InferenceContext()
|
||||
context.extra_context = self.argument_context_map
|
||||
for name, value in keywords:
|
||||
if name is None:
|
||||
# Then it's an unpacking operation (**)
|
||||
try:
|
||||
inferred = next(value.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
|
||||
if not isinstance(inferred, nodes.Dict):
|
||||
# Not something we can work with.
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
|
||||
for dict_key, dict_value in inferred.items:
|
||||
try:
|
||||
dict_key = next(dict_key.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if not isinstance(dict_key, nodes.Const):
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if not isinstance(dict_key.value, str):
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if dict_key.value in values:
|
||||
# The name is already in the dictionary
|
||||
values[dict_key.value] = util.Uninferable
|
||||
self.duplicated_keywords.add(dict_key.value)
|
||||
continue
|
||||
values[dict_key.value] = dict_value
|
||||
else:
|
||||
values[name] = value
|
||||
return values
|
||||
|
||||
def _unpack_args(self, args, context=None):
|
||||
values = []
|
||||
context = context or contextmod.InferenceContext()
|
||||
context.extra_context = self.argument_context_map
|
||||
for arg in args:
|
||||
if isinstance(arg, nodes.Starred):
|
||||
try:
|
||||
inferred = next(arg.value.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
|
||||
if inferred is util.Uninferable:
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
if not hasattr(inferred, "elts"):
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
values.extend(inferred.elts)
|
||||
else:
|
||||
values.append(arg)
|
||||
return values
|
||||
|
||||
def infer_argument(self, funcnode, name, context):
|
||||
"""infer a function argument value according to the call context
|
||||
|
||||
Arguments:
|
||||
funcnode: The function being called.
|
||||
name: The name of the argument whose value is being inferred.
|
||||
context: Inference context object
|
||||
"""
|
||||
if name in self.duplicated_keywords:
|
||||
raise exceptions.InferenceError(
|
||||
"The arguments passed to {func!r} " " have duplicate keywords.",
|
||||
call_site=self,
|
||||
func=funcnode,
|
||||
arg=name,
|
||||
context=context,
|
||||
)
|
||||
|
||||
# Look into the keywords first, maybe it's already there.
|
||||
try:
|
||||
return self.keyword_arguments[name].infer(context)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# Too many arguments given and no variable arguments.
|
||||
if len(self.positional_arguments) > len(funcnode.args.args):
|
||||
if not funcnode.args.vararg:
|
||||
raise exceptions.InferenceError(
|
||||
"Too many positional arguments "
|
||||
"passed to {func!r} that does "
|
||||
"not have *args.",
|
||||
call_site=self,
|
||||
func=funcnode,
|
||||
arg=name,
|
||||
context=context,
|
||||
)
|
||||
|
||||
positional = self.positional_arguments[: len(funcnode.args.args)]
|
||||
vararg = self.positional_arguments[len(funcnode.args.args) :]
|
||||
argindex = funcnode.args.find_argname(name)[0]
|
||||
kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
|
||||
kwargs = {
|
||||
key: value
|
||||
for key, value in self.keyword_arguments.items()
|
||||
if key not in kwonlyargs
|
||||
}
|
||||
# If there are too few positionals compared to
|
||||
# what the function expects to receive, check to see
|
||||
# if the missing positional arguments were passed
|
||||
# as keyword arguments and if so, place them into the
|
||||
# positional args list.
|
||||
if len(positional) < len(funcnode.args.args):
|
||||
for func_arg in funcnode.args.args:
|
||||
if func_arg.name in kwargs:
|
||||
arg = kwargs.pop(func_arg.name)
|
||||
positional.append(arg)
|
||||
|
||||
if argindex is not None:
|
||||
# 2. first argument of instance/class method
|
||||
if argindex == 0 and funcnode.type in ("method", "classmethod"):
|
||||
if context.boundnode is not None:
|
||||
boundnode = context.boundnode
|
||||
else:
|
||||
# XXX can do better ?
|
||||
boundnode = funcnode.parent.frame()
|
||||
|
||||
if isinstance(boundnode, nodes.ClassDef):
|
||||
# Verify that we're accessing a method
|
||||
# of the metaclass through a class, as in
|
||||
# `cls.metaclass_method`. In this case, the
|
||||
# first argument is always the class.
|
||||
method_scope = funcnode.parent.scope()
|
||||
if method_scope is boundnode.metaclass():
|
||||
return iter((boundnode,))
|
||||
|
||||
if funcnode.type == "method":
|
||||
if not isinstance(boundnode, bases.Instance):
|
||||
boundnode = boundnode.instantiate_class()
|
||||
return iter((boundnode,))
|
||||
if funcnode.type == "classmethod":
|
||||
return iter((boundnode,))
|
||||
# if we have a method, extract one position
|
||||
# from the index, so we'll take in account
|
||||
# the extra parameter represented by `self` or `cls`
|
||||
if funcnode.type in ("method", "classmethod"):
|
||||
argindex -= 1
|
||||
# 2. search arg index
|
||||
try:
|
||||
return self.positional_arguments[argindex].infer(context)
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
if funcnode.args.kwarg == name:
|
||||
# It wants all the keywords that were passed into
|
||||
# the call site.
|
||||
if self.has_invalid_keywords():
|
||||
raise exceptions.InferenceError(
|
||||
"Inference failed to find values for all keyword arguments "
|
||||
"to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
|
||||
"{keyword_arguments!r}.",
|
||||
keyword_arguments=self.keyword_arguments,
|
||||
unpacked_kwargs=self._unpacked_kwargs,
|
||||
call_site=self,
|
||||
func=funcnode,
|
||||
arg=name,
|
||||
context=context,
|
||||
)
|
||||
kwarg = nodes.Dict(
|
||||
lineno=funcnode.args.lineno,
|
||||
col_offset=funcnode.args.col_offset,
|
||||
parent=funcnode.args,
|
||||
)
|
||||
kwarg.postinit(
|
||||
[(nodes.const_factory(key), value) for key, value in kwargs.items()]
|
||||
)
|
||||
return iter((kwarg,))
|
||||
if funcnode.args.vararg == name:
|
||||
# It wants all the args that were passed into
|
||||
# the call site.
|
||||
if self.has_invalid_arguments():
|
||||
raise exceptions.InferenceError(
|
||||
"Inference failed to find values for all positional "
|
||||
"arguments to {func!r}: {unpacked_args!r} doesn't "
|
||||
"correspond to {positional_arguments!r}.",
|
||||
positional_arguments=self.positional_arguments,
|
||||
unpacked_args=self._unpacked_args,
|
||||
call_site=self,
|
||||
func=funcnode,
|
||||
arg=name,
|
||||
context=context,
|
||||
)
|
||||
args = nodes.Tuple(
|
||||
lineno=funcnode.args.lineno,
|
||||
col_offset=funcnode.args.col_offset,
|
||||
parent=funcnode.args,
|
||||
)
|
||||
args.postinit(vararg)
|
||||
return iter((args,))
|
||||
|
||||
# Check if it's a default parameter.
|
||||
try:
|
||||
return funcnode.args.default_value(name).infer(context)
|
||||
except exceptions.NoDefault:
|
||||
pass
|
||||
raise exceptions.InferenceError(
|
||||
"No value found for argument {name} to " "{func!r}",
|
||||
call_site=self,
|
||||
func=funcnode,
|
||||
arg=name,
|
||||
context=context,
|
||||
)
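To make the role of CallSite concrete, a minimal usage sketch (illustrative only; the analysed call is a made-up snippet):

    import astroid
    from astroid.arguments import CallSite

    call = astroid.extract_node("max(3, 5, key=abs)")   # a nodes.Call
    site = CallSite.from_call(call)
    print(len(site.positional_arguments))    # 2 - both positional args could be unpacked
    print(list(site.keyword_arguments))      # ['key']
    print(site.has_invalid_arguments())      # False - nothing was lost while unpacking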
venv/lib/python3.8/site-packages/astroid/as_string.py (new file, 631 lines)
@@ -0,0 +1,631 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2013-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2017, 2019 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
|
||||
# Copyright (c) 2018 Serhiy Storchaka <storchaka@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 brendanator <brendan.maginnis@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Alex Hall <alex.mojaki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""This module renders Astroid nodes as string:
|
||||
|
||||
* :func:`to_code` function return equivalent (hopefully valid) python string
|
||||
|
||||
* :func:`dump` function return an internal representation of nodes found
|
||||
in the tree, useful for debugging or understanding the tree structure
|
||||
"""
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
DOC_NEWLINE = "\0"
|
||||
|
||||
|
||||
class AsStringVisitor:
|
||||
"""Visitor to render an Astroid node as a valid python code string"""
|
||||
|
||||
def __init__(self, indent):
|
||||
self.indent = indent
|
||||
|
||||
def __call__(self, node):
|
||||
"""Makes this visitor behave as a simple function"""
|
||||
return node.accept(self).replace(DOC_NEWLINE, "\n")
|
||||
|
||||
def _docs_dedent(self, doc):
|
||||
"""Stop newlines in docs being indented by self._stmt_list"""
|
||||
return '\n%s"""%s"""' % (self.indent, doc.replace("\n", DOC_NEWLINE))
|
||||
|
||||
def _stmt_list(self, stmts, indent=True):
|
||||
"""return a list of nodes to string"""
|
||||
stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
|
||||
if indent:
|
||||
return self.indent + stmts.replace("\n", "\n" + self.indent)
|
||||
|
||||
return stmts
|
||||
|
||||
def _precedence_parens(self, node, child, is_left=True):
|
||||
"""Wrap child in parens only if required to keep same semantics"""
|
||||
if self._should_wrap(node, child, is_left):
|
||||
return "(%s)" % child.accept(self)
|
||||
|
||||
return child.accept(self)
|
||||
|
||||
def _should_wrap(self, node, child, is_left):
|
||||
"""Wrap child if:
|
||||
- it has lower precedence
|
||||
- same precedence with position opposite to associativity direction
|
||||
"""
|
||||
node_precedence = node.op_precedence()
|
||||
child_precedence = child.op_precedence()
|
||||
|
||||
if node_precedence > child_precedence:
|
||||
# 3 * (4 + 5)
|
||||
return True
|
||||
|
||||
if (
|
||||
node_precedence == child_precedence
|
||||
and is_left != node.op_left_associative()
|
||||
):
|
||||
# 3 - (4 - 5)
|
||||
# (2**3)**4
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
## visit_<node> methods ###########################################
|
||||
|
||||
def visit_await(self, node):
|
||||
return "await %s" % node.value.accept(self)
|
||||
|
||||
def visit_asyncwith(self, node):
|
||||
return "async %s" % self.visit_with(node)
|
||||
|
||||
def visit_asyncfor(self, node):
|
||||
return "async %s" % self.visit_for(node)
|
||||
|
||||
def visit_arguments(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
return node.format_args()
|
||||
|
||||
def visit_assignattr(self, node):
|
||||
"""return an astroid.AssAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_assert(self, node):
|
||||
"""return an astroid.Assert node as string"""
|
||||
if node.fail:
|
||||
return "assert %s, %s" % (node.test.accept(self), node.fail.accept(self))
|
||||
return "assert %s" % node.test.accept(self)
|
||||
|
||||
def visit_assignname(self, node):
|
||||
"""return an astroid.AssName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_assign(self, node):
|
||||
"""return an astroid.Assign node as string"""
|
||||
lhs = " = ".join(n.accept(self) for n in node.targets)
|
||||
return "%s = %s" % (lhs, node.value.accept(self))
|
||||
|
||||
def visit_augassign(self, node):
|
||||
"""return an astroid.AugAssign node as string"""
|
||||
return "%s %s %s" % (node.target.accept(self), node.op, node.value.accept(self))
|
||||
|
||||
def visit_annassign(self, node):
|
||||
"""Return an astroid.AugAssign node as string"""
|
||||
|
||||
target = node.target.accept(self)
|
||||
annotation = node.annotation.accept(self)
|
||||
if node.value is None:
|
||||
return "%s: %s" % (target, annotation)
|
||||
return "%s: %s = %s" % (target, annotation, node.value.accept(self))
|
||||
|
||||
def visit_repr(self, node):
|
||||
"""return an astroid.Repr node as string"""
|
||||
return "`%s`" % node.value.accept(self)
|
||||
|
||||
def visit_binop(self, node):
|
||||
"""return an astroid.BinOp node as string"""
|
||||
left = self._precedence_parens(node, node.left)
|
||||
right = self._precedence_parens(node, node.right, is_left=False)
|
||||
if node.op == "**":
|
||||
return "%s%s%s" % (left, node.op, right)
|
||||
|
||||
return "%s %s %s" % (left, node.op, right)
|
||||
|
||||
def visit_boolop(self, node):
|
||||
"""return an astroid.BoolOp node as string"""
|
||||
values = ["%s" % self._precedence_parens(node, n) for n in node.values]
|
||||
return (" %s " % node.op).join(values)
|
||||
|
||||
def visit_break(self, node):
|
||||
"""return an astroid.Break node as string"""
|
||||
return "break"
|
||||
|
||||
def visit_call(self, node):
|
||||
"""return an astroid.Call node as string"""
|
||||
expr_str = self._precedence_parens(node, node.func)
|
||||
args = [arg.accept(self) for arg in node.args]
|
||||
if node.keywords:
|
||||
keywords = [kwarg.accept(self) for kwarg in node.keywords]
|
||||
else:
|
||||
keywords = []
|
||||
|
||||
args.extend(keywords)
|
||||
return "%s(%s)" % (expr_str, ", ".join(args))
|
||||
|
||||
def visit_classdef(self, node):
|
||||
"""return an astroid.ClassDef node as string"""
|
||||
decorate = node.decorators.accept(self) if node.decorators else ""
|
||||
args = [n.accept(self) for n in node.bases]
|
||||
if node._metaclass and not node.has_metaclass_hack():
|
||||
args.append("metaclass=" + node._metaclass.accept(self))
|
||||
args += [n.accept(self) for n in node.keywords]
|
||||
args = "(%s)" % ", ".join(args) if args else ""
|
||||
docs = self._docs_dedent(node.doc) if node.doc else ""
|
||||
return "\n\n%sclass %s%s:%s\n%s\n" % (
|
||||
decorate,
|
||||
node.name,
|
||||
args,
|
||||
docs,
|
||||
self._stmt_list(node.body),
|
||||
)
|
||||
|
||||
def visit_compare(self, node):
|
||||
"""return an astroid.Compare node as string"""
|
||||
rhs_str = " ".join(
|
||||
[
|
||||
"%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
|
||||
for op, expr in node.ops
|
||||
]
|
||||
)
|
||||
return "%s %s" % (self._precedence_parens(node, node.left), rhs_str)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
ifs = "".join(" if %s" % n.accept(self) for n in node.ifs)
|
||||
generated = "for %s in %s%s" % (
|
||||
node.target.accept(self),
|
||||
node.iter.accept(self),
|
||||
ifs,
|
||||
)
|
||||
return "%s%s" % ("async " if node.is_async else "", generated)
|
||||
|
||||
def visit_const(self, node):
|
||||
"""return an astroid.Const node as string"""
|
||||
if node.value is Ellipsis:
|
||||
return "..."
|
||||
return repr(node.value)
|
||||
|
||||
def visit_continue(self, node):
|
||||
"""return an astroid.Continue node as string"""
|
||||
return "continue"
|
||||
|
||||
def visit_delete(self, node): # XXX check if correct
|
||||
"""return an astroid.Delete node as string"""
|
||||
return "del %s" % ", ".join(child.accept(self) for child in node.targets)
|
||||
|
||||
def visit_delattr(self, node):
|
||||
"""return an astroid.DelAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_delname(self, node):
|
||||
"""return an astroid.DelName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_decorators(self, node):
|
||||
"""return an astroid.Decorators node as string"""
|
||||
return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes)
|
||||
|
||||
def visit_dict(self, node):
|
||||
"""return an astroid.Dict node as string"""
|
||||
return "{%s}" % ", ".join(self._visit_dict(node))
|
||||
|
||||
def _visit_dict(self, node):
|
||||
for key, value in node.items:
|
||||
key = key.accept(self)
|
||||
value = value.accept(self)
|
||||
if key == "**":
|
||||
# It can only be a DictUnpack node.
|
||||
yield key + value
|
||||
else:
|
||||
yield "%s: %s" % (key, value)
|
||||
|
||||
def visit_dictunpack(self, node):
|
||||
return "**"
|
||||
|
||||
def visit_dictcomp(self, node):
|
||||
"""return an astroid.DictComp node as string"""
|
||||
return "{%s: %s %s}" % (
|
||||
node.key.accept(self),
|
||||
node.value.accept(self),
|
||||
" ".join(n.accept(self) for n in node.generators),
|
||||
)
|
||||
|
||||
def visit_expr(self, node):
|
||||
"""return an astroid.Discard node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_emptynode(self, node):
|
||||
"""dummy method for visiting an Empty node"""
|
||||
return ""
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = "except %s as %s" % (
|
||||
node.type.accept(self),
|
||||
node.name.accept(self),
|
||||
)
|
||||
else:
|
||||
excs = "except %s" % node.type.accept(self)
|
||||
else:
|
||||
excs = "except"
|
||||
return "%s:\n%s" % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_ellipsis(self, node):
|
||||
"""return an astroid.Ellipsis node as string"""
|
||||
return "..."
|
||||
|
||||
def visit_empty(self, node):
|
||||
"""return an Empty node as string"""
|
||||
return ""
|
||||
|
||||
def visit_exec(self, node):
|
||||
"""return an astroid.Exec node as string"""
|
||||
if node.locals:
|
||||
return "exec %s in %s, %s" % (
|
||||
node.expr.accept(self),
|
||||
node.locals.accept(self),
|
||||
node.globals.accept(self),
|
||||
)
|
||||
if node.globals:
|
||||
return "exec %s in %s" % (node.expr.accept(self), node.globals.accept(self))
|
||||
return "exec %s" % node.expr.accept(self)
|
||||
|
||||
def visit_extslice(self, node):
|
||||
"""return an astroid.ExtSlice node as string"""
|
||||
return ", ".join(dim.accept(self) for dim in node.dims)
|
||||
|
||||
def visit_for(self, node):
|
||||
"""return an astroid.For node as string"""
|
||||
fors = "for %s in %s:\n%s" % (
|
||||
node.target.accept(self),
|
||||
node.iter.accept(self),
|
||||
self._stmt_list(node.body),
|
||||
)
|
||||
if node.orelse:
|
||||
fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
|
||||
return fors
|
||||
|
||||
def visit_importfrom(self, node):
|
||||
"""return an astroid.ImportFrom node as string"""
|
||||
return "from %s import %s" % (
|
||||
"." * (node.level or 0) + node.modname,
|
||||
_import_string(node.names),
|
||||
)
|
||||
|
||||
def visit_joinedstr(self, node):
|
||||
string = "".join(
|
||||
# Use repr on the string literal parts
|
||||
# to get proper escapes, e.g. \n, \\, \"
|
||||
# But strip the quotes off the ends
|
||||
# (they will always be one character: ' or ")
|
||||
repr(value.value)[1:-1]
|
||||
# Literal braces must be doubled to escape them
|
||||
.replace("{", "{{").replace("}", "}}")
|
||||
# Each value in values is either a string literal (Const)
|
||||
# or a FormattedValue
|
||||
if type(value).__name__ == "Const" else value.accept(self)
|
||||
for value in node.values
|
||||
)
|
||||
|
||||
# Try to find surrounding quotes that don't appear at all in the string.
|
||||
# Because the formatted values inside {} can't contain backslash (\)
|
||||
# using a triple quote is sometimes necessary
|
||||
for quote in ["'", '"', '"""', "'''"]:
|
||||
if quote not in string:
|
||||
break
|
||||
|
||||
return "f" + quote + string + quote
|
||||
|
||||
def visit_formattedvalue(self, node):
|
||||
result = node.value.accept(self)
|
||||
if node.conversion and node.conversion >= 0:
|
||||
# e.g. if node.conversion == 114: result += "!r"
|
||||
result += "!" + chr(node.conversion)
|
||||
if node.format_spec:
|
||||
# The format spec is itself a JoinedString, i.e. an f-string
|
||||
# We strip the f and quotes of the ends
|
||||
result += ":" + node.format_spec.accept(self)[2:-1]
|
||||
return "{%s}" % result
|
||||
|
||||
def handle_functiondef(self, node, keyword):
|
||||
"""return a (possibly async) function definition node as string"""
|
||||
decorate = node.decorators.accept(self) if node.decorators else ""
|
||||
docs = self._docs_dedent(node.doc) if node.doc else ""
|
||||
trailer = ":"
|
||||
if node.returns:
|
||||
return_annotation = " -> " + node.returns.as_string()
|
||||
trailer = return_annotation + ":"
|
||||
def_format = "\n%s%s %s(%s)%s%s\n%s"
|
||||
return def_format % (
|
||||
decorate,
|
||||
keyword,
|
||||
node.name,
|
||||
node.args.accept(self),
|
||||
trailer,
|
||||
docs,
|
||||
self._stmt_list(node.body),
|
||||
)
|
||||
|
||||
def visit_functiondef(self, node):
|
||||
"""return an astroid.FunctionDef node as string"""
|
||||
return self.handle_functiondef(node, "def")
|
||||
|
||||
def visit_asyncfunctiondef(self, node):
|
||||
"""return an astroid.AsyncFunction node as string"""
|
||||
return self.handle_functiondef(node, "async def")
|
||||
|
||||
def visit_generatorexp(self, node):
|
||||
"""return an astroid.GeneratorExp node as string"""
|
||||
return "(%s %s)" % (
|
||||
node.elt.accept(self),
|
||||
" ".join(n.accept(self) for n in node.generators),
|
||||
)
|
||||
|
||||
def visit_attribute(self, node):
|
||||
"""return an astroid.Getattr node as string"""
|
||||
left = self._precedence_parens(node, node.expr)
|
||||
if left.isdigit():
|
||||
left = "(%s)" % left
|
||||
return "%s.%s" % (left, node.attrname)
|
||||
|
||||
def visit_global(self, node):
|
||||
"""return an astroid.Global node as string"""
|
||||
return "global %s" % ", ".join(node.names)
|
||||
|
||||
def visit_if(self, node):
|
||||
"""return an astroid.If node as string"""
|
||||
ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
|
||||
if node.has_elif_block():
|
||||
ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
|
||||
elif node.orelse:
|
||||
ifs.append("else:\n%s" % self._stmt_list(node.orelse))
|
||||
return "\n".join(ifs)
|
||||
|
||||
def visit_ifexp(self, node):
|
||||
"""return an astroid.IfExp node as string"""
|
||||
return "%s if %s else %s" % (
|
||||
self._precedence_parens(node, node.body, is_left=True),
|
||||
self._precedence_parens(node, node.test, is_left=True),
|
||||
self._precedence_parens(node, node.orelse, is_left=False),
|
||||
)
|
||||
|
||||
def visit_import(self, node):
|
||||
"""return an astroid.Import node as string"""
|
||||
return "import %s" % _import_string(node.names)
|
||||
|
||||
def visit_keyword(self, node):
|
||||
"""return an astroid.Keyword node as string"""
|
||||
if node.arg is None:
|
||||
return "**%s" % node.value.accept(self)
|
||||
return "%s=%s" % (node.arg, node.value.accept(self))
|
||||
|
||||
def visit_lambda(self, node):
|
||||
"""return an astroid.Lambda node as string"""
|
||||
args = node.args.accept(self)
|
||||
body = node.body.accept(self)
|
||||
if args:
|
||||
return "lambda %s: %s" % (args, body)
|
||||
|
||||
return "lambda: %s" % body
|
||||
|
||||
def visit_list(self, node):
|
||||
"""return an astroid.List node as string"""
|
||||
return "[%s]" % ", ".join(child.accept(self) for child in node.elts)
|
||||
|
||||
def visit_listcomp(self, node):
|
||||
"""return an astroid.ListComp node as string"""
|
||||
return "[%s %s]" % (
|
||||
node.elt.accept(self),
|
||||
" ".join(n.accept(self) for n in node.generators),
|
||||
)
|
||||
|
||||
def visit_module(self, node):
|
||||
"""return an astroid.Module node as string"""
|
||||
docs = '"""%s"""\n\n' % node.doc if node.doc else ""
|
||||
return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
|
||||
|
||||
def visit_name(self, node):
|
||||
"""return an astroid.Name node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_namedexpr(self, node):
|
||||
"""Return an assignment expression node as string"""
|
||||
target = node.target.accept(self)
|
||||
value = node.value.accept(self)
|
||||
return "%s := %s" % (target, value)
|
||||
|
||||
def visit_nonlocal(self, node):
|
||||
"""return an astroid.Nonlocal node as string"""
|
||||
return "nonlocal %s" % ", ".join(node.names)
|
||||
|
||||
def visit_pass(self, node):
|
||||
"""return an astroid.Pass node as string"""
|
||||
return "pass"
|
||||
|
||||
def visit_print(self, node):
|
||||
"""return an astroid.Print node as string"""
|
||||
nodes = ", ".join(n.accept(self) for n in node.values)
|
||||
if not node.nl:
|
||||
nodes = "%s," % nodes
|
||||
if node.dest:
|
||||
return "print >> %s, %s" % (node.dest.accept(self), nodes)
|
||||
return "print %s" % nodes
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.cause:
|
||||
return "raise %s from %s" % (
|
||||
node.exc.accept(self),
|
||||
node.cause.accept(self),
|
||||
)
|
||||
return "raise %s" % node.exc.accept(self)
|
||||
return "raise"
|
||||
|
||||
def visit_return(self, node):
|
||||
"""return an astroid.Return node as string"""
|
||||
if node.is_tuple_return() and len(node.value.elts) > 1:
|
||||
elts = [child.accept(self) for child in node.value.elts]
|
||||
return "return %s" % ", ".join(elts)
|
||||
|
||||
if node.value:
|
||||
return "return %s" % node.value.accept(self)
|
||||
|
||||
return "return"
|
||||
|
||||
def visit_index(self, node):
|
||||
"""return an astroid.Index node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_set(self, node):
|
||||
"""return an astroid.Set node as string"""
|
||||
return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
|
||||
|
||||
def visit_setcomp(self, node):
|
||||
"""return an astroid.SetComp node as string"""
|
||||
return "{%s %s}" % (
|
||||
node.elt.accept(self),
|
||||
" ".join(n.accept(self) for n in node.generators),
|
||||
)
|
||||
|
||||
def visit_slice(self, node):
|
||||
"""return an astroid.Slice node as string"""
|
||||
lower = node.lower.accept(self) if node.lower else ""
|
||||
upper = node.upper.accept(self) if node.upper else ""
|
||||
step = node.step.accept(self) if node.step else ""
|
||||
if step:
|
||||
return "%s:%s:%s" % (lower, upper, step)
|
||||
return "%s:%s" % (lower, upper)
|
||||
|
||||
def visit_subscript(self, node):
|
||||
"""return an astroid.Subscript node as string"""
|
||||
idx = node.slice
|
||||
if idx.__class__.__name__.lower() == "index":
|
||||
idx = idx.value
|
||||
idxstr = idx.accept(self)
|
||||
if idx.__class__.__name__.lower() == "tuple" and idx.elts:
|
||||
# Remove parenthesis in tuple and extended slice.
|
||||
# a[(::1, 1:)] is not valid syntax.
|
||||
idxstr = idxstr[1:-1]
|
||||
return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
|
||||
|
||||
def visit_tryexcept(self, node):
|
||||
"""return an astroid.TryExcept node as string"""
|
||||
trys = ["try:\n%s" % self._stmt_list(node.body)]
|
||||
for handler in node.handlers:
|
||||
trys.append(handler.accept(self))
|
||||
if node.orelse:
|
||||
trys.append("else:\n%s" % self._stmt_list(node.orelse))
|
||||
return "\n".join(trys)
|
||||
|
||||
def visit_tryfinally(self, node):
|
||||
"""return an astroid.TryFinally node as string"""
|
||||
return "try:\n%s\nfinally:\n%s" % (
|
||||
self._stmt_list(node.body),
|
||||
self._stmt_list(node.finalbody),
|
||||
)
|
||||
|
||||
def visit_tuple(self, node):
|
||||
"""return an astroid.Tuple node as string"""
|
||||
if len(node.elts) == 1:
|
||||
return "(%s, )" % node.elts[0].accept(self)
|
||||
return "(%s)" % ", ".join(child.accept(self) for child in node.elts)
|
||||
|
||||
def visit_unaryop(self, node):
|
||||
"""return an astroid.UnaryOp node as string"""
|
||||
if node.op == "not":
|
||||
operator = "not "
|
||||
else:
|
||||
operator = node.op
|
||||
return "%s%s" % (operator, self._precedence_parens(node, node.operand))
|
||||
|
||||
def visit_while(self, node):
|
||||
"""return an astroid.While node as string"""
|
||||
whiles = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
whiles = "%s\nelse:\n%s" % (whiles, self._stmt_list(node.orelse))
|
||||
return whiles
|
||||
|
||||
def visit_with(self, node): # 'with' without 'as' is possible
|
||||
"""return an astroid.With node as string"""
|
||||
items = ", ".join(
|
||||
("%s" % expr.accept(self)) + (vars and " as %s" % (vars.accept(self)) or "")
|
||||
for expr, vars in node.items
|
||||
)
|
||||
return "with %s:\n%s" % (items, self._stmt_list(node.body))
|
||||
|
||||
def visit_yield(self, node):
|
||||
"""yield an ast.Yield node as string"""
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = "yield" + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
def visit_yieldfrom(self, node):
|
||||
""" Return an astroid.YieldFrom node as string. """
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = "yield from" + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
def visit_starred(self, node):
|
||||
"""return Starred node as string"""
|
||||
return "*" + node.value.accept(self)
|
||||
|
||||
# These aren't for real AST nodes, but for inference objects.
|
||||
|
||||
def visit_frozenset(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_super(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_uninferable(self, node):
|
||||
return str(node)
|
||||
|
||||
def visit_property(self, node):
|
||||
return node.function.accept(self)
|
||||
|
||||
def visit_evaluatedobject(self, node):
|
||||
return node.original.accept(self)
|
||||
|
||||
|
||||
def _import_string(names):
|
||||
"""return a list of (name, asname) formatted as a string"""
|
||||
_names = []
|
||||
for name, asname in names:
|
||||
if asname is not None:
|
||||
_names.append("%s as %s" % (name, asname))
|
||||
else:
|
||||
_names.append(name)
|
||||
return ", ".join(_names)
|
||||
|
||||
|
||||
# This sets the default indent to 4 spaces.
|
||||
to_code = AsStringVisitor("    ")
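As a quick illustration of the visitor defined in this file (a sketch; the parsed snippet is invented for the example):

    import astroid
    from astroid.as_string import to_code

    node = astroid.extract_node("x=( 1+2 )*3")
    print(to_code(node))    # re-renders the tree as normalized source: x = (1 + 2) * 3

The same rendering backs node.as_string() elsewhere in astroid.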
venv/lib/python3.8/site-packages/astroid/bases.py (new file, 548 lines)
@@ -0,0 +1,548 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017 Calen Pennington <calen.pennington@gmail.com>
|
||||
# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Daniel Colascione <dancol@dancol.org>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""This module contains base classes and functions for the nodes and some
|
||||
inference utils.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import collections
|
||||
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import util
|
||||
|
||||
objectmodel = util.lazy_import("interpreter.objectmodel")
|
||||
helpers = util.lazy_import("helpers")
|
||||
BUILTINS = builtins.__name__
|
||||
manager = util.lazy_import("manager")
|
||||
MANAGER = manager.AstroidManager()
|
||||
|
||||
# TODO: check if needs special treatment
|
||||
BUILTINS = "builtins"
|
||||
BOOL_SPECIAL_METHOD = "__bool__"
|
||||
|
||||
PROPERTIES = {BUILTINS + ".property", "abc.abstractproperty"}
|
||||
# List of possible property names. We use this list in order
|
||||
# to see if a method is a property or not. This should be
|
||||
# pretty reliable and fast, the alternative being to check each
|
||||
# decorator to see if its a real property-like descriptor, which
|
||||
# can be too complicated.
|
||||
# Also, these aren't qualified, because each project can
|
||||
# define them, we shouldn't expect to know every possible
|
||||
# property-like decorator!
|
||||
POSSIBLE_PROPERTIES = {
|
||||
"cached_property",
|
||||
"cachedproperty",
|
||||
"lazyproperty",
|
||||
"lazy_property",
|
||||
"reify",
|
||||
"lazyattribute",
|
||||
"lazy_attribute",
|
||||
"LazyProperty",
|
||||
"lazy",
|
||||
"cache_readonly",
|
||||
}
|
||||
|
||||
|
||||
def _is_property(meth, context=None):
|
||||
decoratornames = meth.decoratornames(context=context)
|
||||
if PROPERTIES.intersection(decoratornames):
|
||||
return True
|
||||
stripped = {
|
||||
name.split(".")[-1] for name in decoratornames if name is not util.Uninferable
|
||||
}
|
||||
if any(name in stripped for name in POSSIBLE_PROPERTIES):
|
||||
return True
|
||||
|
||||
# Lookup for subclasses of *property*
|
||||
if not meth.decorators:
|
||||
return False
|
||||
for decorator in meth.decorators.nodes or ():
|
||||
inferred = helpers.safe_infer(decorator, context=context)
|
||||
if inferred is None or inferred is util.Uninferable:
|
||||
continue
|
||||
if inferred.__class__.__name__ == "ClassDef":
|
||||
for base_class in inferred.bases:
|
||||
if base_class.__class__.__name__ != "Name":
|
||||
continue
|
||||
module, _ = base_class.lookup(base_class.name)
|
||||
if module.name == BUILTINS and base_class.name == "property":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class Proxy:
|
||||
"""a simple proxy object
|
||||
|
||||
Note:
|
||||
|
||||
Subclasses of this object will need a custom __getattr__
|
||||
if new instance attributes are created. See the Const class
|
||||
"""
|
||||
|
||||
_proxied = None # proxied object may be set by class or by instance
|
||||
|
||||
def __init__(self, proxied=None):
|
||||
if proxied is not None:
|
||||
self._proxied = proxied
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == "_proxied":
|
||||
return getattr(self.__class__, "_proxied")
|
||||
if name in self.__dict__:
|
||||
return self.__dict__[name]
|
||||
return getattr(self._proxied, name)
|
||||
|
||||
def infer(self, context=None):
|
||||
yield self
|
||||
|
||||
|
||||
def _infer_stmts(stmts, context, frame=None):
|
||||
"""Return an iterator on statements inferred by each statement in *stmts*."""
|
||||
inferred = False
|
||||
if context is not None:
|
||||
name = context.lookupname
|
||||
context = context.clone()
|
||||
else:
|
||||
name = None
|
||||
context = contextmod.InferenceContext()
|
||||
|
||||
for stmt in stmts:
|
||||
if stmt is util.Uninferable:
|
||||
yield stmt
|
||||
inferred = True
|
||||
continue
|
||||
context.lookupname = stmt._infer_name(frame, name)
|
||||
try:
|
||||
for inferred in stmt.infer(context=context):
|
||||
yield inferred
|
||||
inferred = True
|
||||
except exceptions.NameInferenceError:
|
||||
continue
|
||||
except exceptions.InferenceError:
|
||||
yield util.Uninferable
|
||||
inferred = True
|
||||
if not inferred:
|
||||
raise exceptions.InferenceError(
|
||||
"Inference failed for all members of {stmts!r}.",
|
||||
stmts=stmts,
|
||||
frame=frame,
|
||||
context=context,
|
||||
)
|
||||
|
||||
|
||||
def _infer_method_result_truth(instance, method_name, context):
|
||||
# Get the method from the instance and try to infer
|
||||
# its return's truth value.
|
||||
meth = next(instance.igetattr(method_name, context=context), None)
|
||||
if meth and hasattr(meth, "infer_call_result"):
|
||||
if not meth.callable():
|
||||
return util.Uninferable
|
||||
try:
|
||||
for value in meth.infer_call_result(instance, context=context):
|
||||
if value is util.Uninferable:
|
||||
return value
|
||||
|
||||
inferred = next(value.infer(context=context))
|
||||
return inferred.bool_value()
|
||||
except exceptions.InferenceError:
|
||||
pass
|
||||
return util.Uninferable
|
||||
|
||||
|
||||
class BaseInstance(Proxy):
|
||||
"""An instance base class, which provides lookup methods for potential instances."""
|
||||
|
||||
special_attributes = None
|
||||
|
||||
def display_type(self):
|
||||
return "Instance of"
|
||||
|
||||
def getattr(self, name, context=None, lookupclass=True):
|
||||
try:
|
||||
values = self._proxied.instance_attr(name, context)
|
||||
except exceptions.AttributeInferenceError as exc:
|
||||
if self.special_attributes and name in self.special_attributes:
|
||||
return [self.special_attributes.lookup(name)]
|
||||
|
||||
if lookupclass:
|
||||
# Class attributes not available through the instance
|
||||
# unless they are explicitly defined.
|
||||
return self._proxied.getattr(name, context, class_context=False)
|
||||
|
||||
raise exceptions.AttributeInferenceError(
|
||||
target=self, attribute=name, context=context
|
||||
) from exc
|
||||
# since we've no context information, return matching class members as
|
||||
# well
|
||||
if lookupclass:
|
||||
try:
|
||||
return values + self._proxied.getattr(
|
||||
name, context, class_context=False
|
||||
)
|
||||
except exceptions.AttributeInferenceError:
|
||||
pass
|
||||
return values
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
"""inferred getattr"""
|
||||
if not context:
|
||||
context = contextmod.InferenceContext()
|
||||
try:
|
||||
# avoid recursively inferring the same attr on the same class
|
||||
if context.push((self._proxied, name)):
|
||||
raise exceptions.InferenceError(
|
||||
message="Cannot infer the same attribute again",
|
||||
node=self,
|
||||
context=context,
|
||||
)
|
||||
|
||||
# XXX frame should be self._proxied, or not ?
|
||||
get_attr = self.getattr(name, context, lookupclass=False)
|
||||
yield from _infer_stmts(
|
||||
self._wrap_attr(get_attr, context), context, frame=self
|
||||
)
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
try:
|
||||
# fallback to class.igetattr since it has some logic to handle
|
||||
# descriptors
|
||||
# But only if the _proxied is the Class.
|
||||
if self._proxied.__class__.__name__ != "ClassDef":
|
||||
raise
|
||||
attrs = self._proxied.igetattr(name, context, class_context=False)
|
||||
yield from self._wrap_attr(attrs, context)
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
raise exceptions.InferenceError(**vars(error)) from error
|
||||
|
||||
def _wrap_attr(self, attrs, context=None):
|
||||
"""wrap bound methods of attrs in a InstanceMethod proxies"""
|
||||
for attr in attrs:
|
||||
if isinstance(attr, UnboundMethod):
|
||||
if _is_property(attr):
|
||||
yield from attr.infer_call_result(self, context)
|
||||
else:
|
||||
yield BoundMethod(attr, self)
|
||||
elif hasattr(attr, "name") and attr.name == "<lambda>":
|
||||
if attr.args.arguments and attr.args.arguments[0].name == "self":
|
||||
yield BoundMethod(attr, self)
|
||||
continue
|
||||
yield attr
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
"""infer what a class instance is returning when called"""
|
||||
context = contextmod.bind_context_to_node(context, self)
|
||||
inferred = False
|
||||
for node in self._proxied.igetattr("__call__", context):
|
||||
if node is util.Uninferable or not node.callable():
|
||||
continue
|
||||
for res in node.infer_call_result(caller, context):
|
||||
inferred = True
|
||||
yield res
|
||||
if not inferred:
|
||||
raise exceptions.InferenceError(node=self, caller=caller, context=context)
|
||||
|
||||
|
||||
class Instance(BaseInstance):
|
||||
"""A special node representing a class instance."""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
|
||||
|
||||
def __repr__(self):
|
||||
return "<Instance of %s.%s at 0x%s>" % (
|
||||
self._proxied.root().name,
|
||||
self._proxied.name,
|
||||
id(self),
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "Instance of %s.%s" % (self._proxied.root().name, self._proxied.name)
|
||||
|
||||
def callable(self):
|
||||
try:
|
||||
self._proxied.getattr("__call__", class_context=False)
|
||||
return True
|
||||
except exceptions.AttributeInferenceError:
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return self._proxied.qname()
|
||||
|
||||
def display_type(self):
|
||||
return "Instance of"
|
||||
|
||||
def bool_value(self, context=None):
|
||||
"""Infer the truth value for an Instance
|
||||
|
||||
The truth value of an instance is determined by these conditions:
|
||||
|
||||
* if it implements __bool__ on Python 3 or __nonzero__
|
||||
on Python 2, then its bool value will be determined by
|
||||
calling this special method and checking its result.
|
||||
* when this method is not defined, __len__() is called, if it
|
||||
is defined, and the object is considered true if its result is
|
||||
nonzero. If a class defines neither __len__() nor __bool__(),
|
||||
all its instances are considered true.
|
||||
"""
|
||||
context = context or contextmod.InferenceContext()
|
||||
context.callcontext = contextmod.CallContext(args=[])
|
||||
context.boundnode = self
|
||||
|
||||
try:
|
||||
result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
|
||||
except (exceptions.InferenceError, exceptions.AttributeInferenceError):
|
||||
# Fallback to __len__.
|
||||
try:
|
||||
result = _infer_method_result_truth(self, "__len__", context)
|
||||
except (exceptions.AttributeInferenceError, exceptions.InferenceError):
|
||||
return True
|
||||
return result
|
||||
|
||||
# This is set in inference.py.
|
||||
def getitem(self, index, context=None):
|
||||
pass
|
||||
|
||||
|
||||
class UnboundMethod(Proxy):
|
||||
"""a special node representing a method not bound to an instance"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
|
||||
|
||||
def __repr__(self):
|
||||
frame = self._proxied.parent.frame()
|
||||
return "<%s %s of %s at 0x%s" % (
|
||||
self.__class__.__name__,
|
||||
self._proxied.name,
|
||||
frame.qname(),
|
||||
id(self),
|
||||
)
|
||||
|
||||
def implicit_parameters(self):
|
||||
return 0
|
||||
|
||||
def is_bound(self):
|
||||
return False
|
||||
|
||||
def getattr(self, name, context=None):
|
||||
if name in self.special_attributes:
|
||||
return [self.special_attributes.lookup(name)]
|
||||
return self._proxied.getattr(name, context)
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
if name in self.special_attributes:
|
||||
return iter((self.special_attributes.lookup(name),))
|
||||
return self._proxied.igetattr(name, context)
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
"""
|
||||
The boundnode of the regular context with a function called
|
||||
on ``object.__new__`` will be of type ``object``,
|
||||
which is incorrect for the argument in general.
|
||||
If no context is given, the ``object.__new__`` call argument will be
|
||||
correctly inferred, except when inside a call that requires
|
||||
the additional context (such as a classmethod) of the boundnode
|
||||
to determine which class the method was called from.
|
||||
"""
|
||||
|
||||
# If this is the unbound __new__ method of the builtin object, the result is an
|
||||
# instance of the class given as first argument.
|
||||
if (
|
||||
self._proxied.name == "__new__"
|
||||
and self._proxied.parent.frame().qname() == "%s.object" % BUILTINS
|
||||
):
|
||||
if caller.args:
|
||||
node_context = context.extra_context.get(caller.args[0])
|
||||
infer = caller.args[0].infer(context=node_context)
|
||||
else:
|
||||
infer = []
|
||||
return (Instance(x) if x is not util.Uninferable else x for x in infer)
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
def bool_value(self, context=None):
|
||||
return True
|
||||
|
||||
|
||||
class BoundMethod(UnboundMethod):
|
||||
"""a special node representing a method bound to an instance"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
|
||||
|
||||
def __init__(self, proxy, bound):
|
||||
UnboundMethod.__init__(self, proxy)
|
||||
self.bound = bound
|
||||
|
||||
def implicit_parameters(self):
|
||||
if self.name == "__new__":
|
||||
# __new__ acts as a classmethod but the class argument is not implicit.
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def is_bound(self):
|
||||
return True
|
||||
|
||||
def _infer_type_new_call(self, caller, context):
|
||||
"""Try to infer what type.__new__(mcs, name, bases, attrs) returns.
|
||||
|
||||
In order for such a call to be valid, the metaclass needs to be
|
||||
a subtype of ``type``, the name needs to be a string, the bases
|
||||
need to be a tuple of classes.
|
||||
"""
|
||||
# pylint: disable=import-outside-toplevel; circular import
|
||||
from astroid import node_classes
|
||||
|
||||
# Verify the metaclass
|
||||
mcs = next(caller.args[0].infer(context=context))
|
||||
if mcs.__class__.__name__ != "ClassDef":
|
||||
# Not a valid first argument.
|
||||
return None
|
||||
if not mcs.is_subtype_of("%s.type" % BUILTINS):
|
||||
# Not a valid metaclass.
|
||||
return None
|
||||
|
||||
# Verify the name
|
||||
name = next(caller.args[1].infer(context=context))
|
||||
if name.__class__.__name__ != "Const":
|
||||
# Not a valid name, needs to be a const.
|
||||
return None
|
||||
if not isinstance(name.value, str):
|
||||
# Needs to be a string.
|
||||
return None
|
||||
|
||||
# Verify the bases
|
||||
bases = next(caller.args[2].infer(context=context))
|
||||
if bases.__class__.__name__ != "Tuple":
|
||||
# Needs to be a tuple.
|
||||
return None
|
||||
inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
|
||||
if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
|
||||
# All the bases need to be classes.
|
||||
return None
|
||||
|
||||
# Verify the attributes.
|
||||
attrs = next(caller.args[3].infer(context=context))
|
||||
if attrs.__class__.__name__ != "Dict":
|
||||
# Needs to be a dictionary.
|
||||
return None
|
||||
cls_locals = collections.defaultdict(list)
|
||||
for key, value in attrs.items:
|
||||
key = next(key.infer(context=context))
|
||||
value = next(value.infer(context=context))
|
||||
# Ignore non string keys
|
||||
if key.__class__.__name__ == "Const" and isinstance(key.value, str):
|
||||
cls_locals[key.value].append(value)
|
||||
|
||||
# Build the class now.
|
||||
cls = mcs.__class__(
|
||||
name=name.value,
|
||||
lineno=caller.lineno,
|
||||
col_offset=caller.col_offset,
|
||||
parent=caller,
|
||||
)
|
||||
empty = node_classes.Pass()
|
||||
cls.postinit(
|
||||
bases=bases.elts,
|
||||
body=[empty],
|
||||
decorators=[],
|
||||
newstyle=True,
|
||||
metaclass=mcs,
|
||||
keywords=[],
|
||||
)
|
||||
cls.locals = cls_locals
|
||||
return cls
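# Hedged illustration (added commentary, not upstream astroid code): the
# validation above is what allows a dynamic ``type(...)`` class creation to be
# folded into a regular ClassDef during inference (the helper itself validates
# the four-argument ``type.__new__`` form). A commented sketch, assuming only
# the public astroid API:
#
#     import astroid
#     node = astroid.extract_node("type('Point', (object,), {'x': 1})  #@")
#     inferred = next(node.infer())
#     print(inferred.name)            # expected: 'Point'
#     print('x' in inferred.locals)   # expected: True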
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
context = contextmod.bind_context_to_node(context, self.bound)
|
||||
if (
|
||||
self.bound.__class__.__name__ == "ClassDef"
|
||||
and self.bound.name == "type"
|
||||
and self.name == "__new__"
|
||||
and len(caller.args) == 4
|
||||
):
|
||||
# Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
|
||||
new_cls = self._infer_type_new_call(caller, context)
|
||||
if new_cls:
|
||||
return iter((new_cls,))
|
||||
|
||||
return super(BoundMethod, self).infer_call_result(caller, context)
|
||||
|
||||
def bool_value(self, context=None):
|
||||
return True
|
||||
|
||||
|
||||
class Generator(BaseInstance):
|
||||
"""a special node representing a generator.
|
||||
|
||||
Proxied class is set once and for all in raw_building.
|
||||
"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
|
||||
|
||||
# pylint: disable=super-init-not-called
|
||||
def __init__(self, parent=None):
|
||||
self.parent = parent
|
||||
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return "%s.generator" % BUILTINS
|
||||
|
||||
def display_type(self):
|
||||
return "Generator"
|
||||
|
||||
def bool_value(self, context=None):
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
return "<Generator(%s) l.%s at 0x%s>" % (
|
||||
self._proxied.name,
|
||||
self.lineno,
|
||||
id(self),
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "Generator(%s)" % (self._proxied.name)
|
||||
|
||||
|
||||
class AsyncGenerator(Generator):
|
||||
"""Special node representing an async generator"""
|
||||
|
||||
def pytype(self):
|
||||
return "%s.async_generator" % BUILTINS
|
||||
|
||||
def display_type(self):
|
||||
return "AsyncGenerator"
|
||||
|
||||
def __repr__(self):
|
||||
return "<AsyncGenerator(%s) l.%s at 0x%s>" % (
|
||||
self._proxied.name,
|
||||
self.lineno,
|
||||
id(self),
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "AsyncGenerator(%s)" % (self._proxied.name)
|
||||
@@ -0,0 +1,33 @@
|
||||
from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
|
||||
|
||||
|
||||
def infer_namespace(node, context=None):
|
||||
callsite = arguments.CallSite.from_call(node, context=context)
|
||||
if not callsite.keyword_arguments:
|
||||
# Cannot make sense of it.
|
||||
raise UseInferenceDefault()
|
||||
|
||||
class_node = nodes.ClassDef("Namespace", "docstring")
|
||||
class_node.parent = node.parent
|
||||
for attr in set(callsite.keyword_arguments):
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
fake_node.attrname = attr
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return iter((class_node.instantiate_class(),))
|
||||
|
||||
|
||||
def _looks_like_namespace(node):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return (
|
||||
func.attrname == "Namespace"
|
||||
and isinstance(func.expr, nodes.Name)
|
||||
and func.expr.name == "argparse"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
|
||||
)
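# Hedged usage sketch (illustrative only, not upstream code): with the
# transform registered above, keyword arguments passed to
# ``argparse.Namespace(...)`` show up as instance attributes on the inferred
# object, so attribute access on the namespace is no longer reported as
# unknown.
#
#     import astroid
#     node = astroid.extract_node("""
#     import argparse
#     argparse.Namespace(port=8080)  #@
#     """)
#     inferred = next(node.infer())
#     print("port" in inferred.instance_attrs)  # expected: True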
|
||||
@@ -0,0 +1,65 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
"""
|
||||
Astroid hook for the attrs library
|
||||
|
||||
Without this hook, pylint reports unsupported-assignment-operation
|
||||
for attrs classes
|
||||
"""
|
||||
|
||||
import astroid
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib"))
|
||||
ATTRS_NAMES = frozenset(("attr.s", "attrs", "attr.attrs", "attr.attributes"))
|
||||
|
||||
|
||||
def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
|
||||
"""Return True if a decorated node has
|
||||
an attr decorator applied."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator_attribute in node.decorators.nodes:
|
||||
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
|
||||
decorator_attribute = decorator_attribute.func
|
||||
if decorator_attribute.as_string() in decorator_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def attr_attributes_transform(node):
|
||||
"""Given that the ClassNode has an attr decorator,
|
||||
rewrite class attributes as instance attributes
|
||||
"""
|
||||
# Astroid can't infer this attribute properly
|
||||
# Prevents https://github.com/PyCQA/pylint/issues/1884
|
||||
node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
|
||||
|
||||
for cdefbodynode in node.body:
|
||||
if not isinstance(cdefbodynode, (astroid.Assign, astroid.AnnAssign)):
|
||||
continue
|
||||
if isinstance(cdefbodynode.value, astroid.Call):
|
||||
if cdefbodynode.value.func.as_string() not in ATTRIB_NAMES:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
targets = (
|
||||
cdefbodynode.targets
|
||||
if hasattr(cdefbodynode, "targets")
|
||||
else [cdefbodynode.target]
|
||||
)
|
||||
for target in targets:
|
||||
|
||||
rhs_node = astroid.Unknown(
|
||||
lineno=cdefbodynode.lineno,
|
||||
col_offset=cdefbodynode.col_offset,
|
||||
parent=cdefbodynode,
|
||||
)
|
||||
node.locals[target.name] = [rhs_node]
|
||||
node.instance_attrs[target.name] = [rhs_node]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
|
||||
)
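# Hedged usage sketch (illustrative only, not upstream code): after the
# transform above, ``attr.ib`` declarations are exposed as instance
# attributes, which is what silences pylint's unsupported-assignment-operation
# warnings on attrs classes.
#
#     import astroid
#     klass = astroid.extract_node("""
#     import attr
#     @attr.s
#     class Point:
#         x = attr.ib(default=0)
#     Point  #@
#     """)
#     inferred = next(klass.infer())
#     print("x" in inferred.instance_attrs)  # expected: True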
|
||||
@@ -0,0 +1,28 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for understanding boto3.ServiceRequest()"""
|
||||
import astroid
|
||||
from astroid import MANAGER, extract_node
|
||||
|
||||
BOTO_SERVICE_FACTORY_QUALIFIED_NAME = "boto3.resources.base.ServiceResource"
|
||||
|
||||
|
||||
def service_request_transform(node):
|
||||
"""Transform ServiceResource to look like dynamic classes"""
|
||||
code = """
|
||||
def __getattr__(self, attr):
|
||||
return 0
|
||||
"""
|
||||
func_getattr = extract_node(code)
|
||||
node.locals["__getattr__"] = [func_getattr]
|
||||
return node
|
||||
|
||||
|
||||
def _looks_like_boto3_service_request(node):
|
||||
return node.qname() == BOTO_SERVICE_FACTORY_QUALIFIED_NAME
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, service_request_transform, _looks_like_boto3_service_request
|
||||
)
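# Hedged sketch (illustrative only, not upstream code): the transform above
# grafts a permissive ``__getattr__`` onto boto3's ServiceResource class, so
# arbitrary attribute access on service resources no longer fails during
# inference. Assuming boto3 is installed, the effect can be checked with:
#
#     import astroid
#     cls = astroid.extract_node("""
#     from boto3.resources.base import ServiceResource
#     ServiceResource  #@
#     """)
#     inferred = next(cls.infer())
#     print("__getattr__" in inferred.locals)  # expected: True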
|
||||
@@ -0,0 +1,873 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Stanislav Levin <slev@altlinux.org>
|
||||
# Copyright (c) 2019 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2019 Bryce Guinta <bryce.guinta@protonmail.com>
|
||||
# Copyright (c) 2019 Frédéric Chapoton <fchapoton2@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for various builtins."""
|
||||
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
import six
|
||||
from astroid import (
|
||||
MANAGER,
|
||||
UseInferenceDefault,
|
||||
AttributeInferenceError,
|
||||
inference_tip,
|
||||
InferenceError,
|
||||
NameInferenceError,
|
||||
AstroidTypeError,
|
||||
MroError,
|
||||
)
|
||||
from astroid import arguments
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import helpers
|
||||
from astroid import nodes
|
||||
from astroid import objects
|
||||
from astroid import scoped_nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
OBJECT_DUNDER_NEW = "object.__new__"
|
||||
|
||||
|
||||
def _extend_str(class_node, rvalue):
|
||||
"""function to extend builtin str/unicode class"""
|
||||
code = dedent(
|
||||
"""
|
||||
class whatever(object):
|
||||
def join(self, iterable):
|
||||
return {rvalue}
|
||||
def replace(self, old, new, count=None):
|
||||
return {rvalue}
|
||||
def format(self, *args, **kwargs):
|
||||
return {rvalue}
|
||||
def encode(self, encoding='ascii', errors=None):
|
||||
return ''
|
||||
def decode(self, encoding='ascii', errors=None):
|
||||
return u''
|
||||
def capitalize(self):
|
||||
return {rvalue}
|
||||
def title(self):
|
||||
return {rvalue}
|
||||
def lower(self):
|
||||
return {rvalue}
|
||||
def upper(self):
|
||||
return {rvalue}
|
||||
def swapcase(self):
|
||||
return {rvalue}
|
||||
def index(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def find(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def count(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def strip(self, chars=None):
|
||||
return {rvalue}
|
||||
def lstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rjust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def center(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def ljust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
"""
|
||||
)
|
||||
code = code.format(rvalue=rvalue)
|
||||
fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
|
||||
for method in fake.mymethods():
|
||||
method.parent = class_node
|
||||
method.lineno = None
|
||||
method.col_offset = None
|
||||
if "__class__" in method.locals:
|
||||
method.locals["__class__"] = [class_node]
|
||||
class_node.locals[method.name] = [method]
|
||||
method.parent = class_node
|
||||
|
||||
|
||||
def _extend_builtins(class_transforms):
|
||||
builtin_ast = MANAGER.builtins_module
|
||||
for class_name, transform in class_transforms.items():
|
||||
transform(builtin_ast[class_name])
|
||||
|
||||
|
||||
_extend_builtins(
|
||||
{
|
||||
"bytes": partial(_extend_str, rvalue="b''"),
|
||||
"str": partial(_extend_str, rvalue="''"),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _builtin_filter_predicate(node, builtin_name):
|
||||
if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
|
||||
return True
|
||||
if isinstance(node.func, nodes.Attribute):
|
||||
return (
|
||||
node.func.attrname == "fromkeys"
|
||||
and isinstance(node.func.expr, nodes.Name)
|
||||
and node.func.expr.name == "dict"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
|
||||
"""Register a new transform function for the given *builtin_name*.
|
||||
|
||||
The transform function must accept two parameters, a node and
|
||||
an optional context.
|
||||
"""
|
||||
|
||||
def _transform_wrapper(node, context=None):
|
||||
result = transform(node, context=context)
|
||||
if result:
|
||||
if not result.parent:
|
||||
# Let the transformation function determine
|
||||
# the parent for its result. Otherwise,
|
||||
# we set it to be the node we transformed from.
|
||||
result.parent = node
|
||||
|
||||
if result.lineno is None:
|
||||
result.lineno = node.lineno
|
||||
if result.col_offset is None:
|
||||
result.col_offset = node.col_offset
|
||||
return iter([result])
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call,
|
||||
inference_tip(_transform_wrapper),
|
||||
partial(_builtin_filter_predicate, builtin_name=builtin_name),
|
||||
)
|
||||
|
||||
|
||||
def _container_generic_inference(node, context, node_type, transform):
|
||||
args = node.args
|
||||
if not args:
|
||||
return node_type()
|
||||
if len(node.args) > 1:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
(arg,) = args
|
||||
transformed = transform(arg)
|
||||
if not transformed:
|
||||
try:
|
||||
inferred = next(arg.infer(context=context))
|
||||
except (InferenceError, StopIteration):
|
||||
raise UseInferenceDefault()
|
||||
if inferred is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
transformed = transform(inferred)
|
||||
if not transformed or transformed is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
return transformed
|
||||
|
||||
|
||||
def _container_generic_transform(arg, context, klass, iterables, build_elts):
|
||||
if isinstance(arg, klass):
|
||||
return arg
|
||||
elif isinstance(arg, iterables):
|
||||
if all(isinstance(elt, nodes.Const) for elt in arg.elts):
|
||||
elts = [elt.value for elt in arg.elts]
|
||||
else:
|
||||
# TODO: Does not handle deduplication for sets.
|
||||
elts = []
|
||||
for element in arg.elts:
|
||||
inferred = helpers.safe_infer(element, context=context)
|
||||
if inferred:
|
||||
evaluated_object = nodes.EvaluatedObject(
|
||||
original=element, value=inferred
|
||||
)
|
||||
elts.append(evaluated_object)
|
||||
elif isinstance(arg, nodes.Dict):
|
||||
# Dicts need to have consts as strings already.
|
||||
if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
|
||||
raise UseInferenceDefault()
|
||||
elts = [item[0].value for item in arg.items]
|
||||
elif isinstance(arg, nodes.Const) and isinstance(
|
||||
arg.value, (six.string_types, six.binary_type)
|
||||
):
|
||||
elts = arg.value
|
||||
else:
|
||||
return
|
||||
return klass.from_elements(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin_container(
|
||||
node, context, klass=None, iterables=None, build_elts=None
|
||||
):
|
||||
transform_func = partial(
|
||||
_container_generic_transform,
|
||||
context=context,
|
||||
klass=klass,
|
||||
iterables=iterables,
|
||||
build_elts=build_elts,
|
||||
)
|
||||
|
||||
return _container_generic_inference(node, context, klass, transform_func)
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
infer_tuple = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.Tuple,
|
||||
iterables=(
|
||||
nodes.List,
|
||||
nodes.Set,
|
||||
objects.FrozenSet,
|
||||
objects.DictItems,
|
||||
objects.DictKeys,
|
||||
objects.DictValues,
|
||||
),
|
||||
build_elts=tuple,
|
||||
)
|
||||
|
||||
infer_list = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.List,
|
||||
iterables=(
|
||||
nodes.Tuple,
|
||||
nodes.Set,
|
||||
objects.FrozenSet,
|
||||
objects.DictItems,
|
||||
objects.DictKeys,
|
||||
objects.DictValues,
|
||||
),
|
||||
build_elts=list,
|
||||
)
|
||||
|
||||
infer_set = partial(
|
||||
_infer_builtin_container,
|
||||
klass=nodes.Set,
|
||||
iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
|
||||
build_elts=set,
|
||||
)
|
||||
|
||||
infer_frozenset = partial(
|
||||
_infer_builtin_container,
|
||||
klass=objects.FrozenSet,
|
||||
iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
|
||||
build_elts=frozenset,
|
||||
)
|
||||
|
||||
|
||||
def _get_elts(arg, context):
|
||||
is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
|
||||
try:
|
||||
inferred = next(arg.infer(context))
|
||||
except (InferenceError, NameInferenceError):
|
||||
raise UseInferenceDefault()
|
||||
if isinstance(inferred, nodes.Dict):
|
||||
items = inferred.items
|
||||
elif is_iterable(inferred):
|
||||
items = []
|
||||
for elt in inferred.elts:
|
||||
# If an item is not a pair of two items,
|
||||
# then fall back to the default inference.
|
||||
# Also, take into consideration only hashable items,
|
||||
# tuples and consts. Names are accepted as well.
|
||||
if not is_iterable(elt):
|
||||
raise UseInferenceDefault()
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault()
|
||||
if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
|
||||
raise UseInferenceDefault()
|
||||
items.append(tuple(elt.elts))
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
return items
|
||||
|
||||
|
||||
def infer_dict(node, context=None):
|
||||
"""Try to infer a dict call to a Dict node.
|
||||
|
||||
The function treats the following cases:
|
||||
|
||||
* dict()
|
||||
* dict(mapping)
|
||||
* dict(iterable)
|
||||
* dict(iterable, **kwargs)
|
||||
* dict(mapping, **kwargs)
|
||||
* dict(**kwargs)
|
||||
|
||||
If a case can't be inferred, we'll fall back to the default inference.
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.has_invalid_arguments() or call.has_invalid_keywords():
|
||||
raise UseInferenceDefault
|
||||
|
||||
args = call.positional_arguments
|
||||
kwargs = list(call.keyword_arguments.items())
|
||||
|
||||
if not args and not kwargs:
|
||||
# dict()
|
||||
return nodes.Dict()
|
||||
elif kwargs and not args:
|
||||
# dict(a=1, b=2, c=4)
|
||||
items = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
elif len(args) == 1 and kwargs:
|
||||
# dict(some_iterable, b=2, c=4)
|
||||
elts = _get_elts(args[0], context)
|
||||
keys = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
items = elts + keys
|
||||
elif len(args) == 1:
|
||||
items = _get_elts(args[0], context)
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
value = nodes.Dict(
|
||||
col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
|
||||
)
|
||||
value.postinit(items)
|
||||
return value
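# Hedged example (added commentary, illustrative only): the supported call
# forms above are folded into a literal Dict node, e.g.
#
#     import astroid
#     node = astroid.extract_node("dict(a=1, b=2)  #@")
#     inferred = next(node.infer())
#     print(inferred.as_string())  # expected: roughly {'a': 1, 'b': 2}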
|
||||
|
||||
|
||||
def infer_super(node, context=None):
|
||||
"""Understand super calls.
|
||||
|
||||
There are some restrictions for what can be understood:
|
||||
|
||||
* unbound super (one-argument form) is not understood.
|
||||
|
||||
* if the super call is not inside a function (classmethod or method),
|
||||
then the default inference will be used.
|
||||
|
||||
* if the super arguments can't be inferred, the default inference
|
||||
will be used.
|
||||
"""
|
||||
if len(node.args) == 1:
|
||||
# Ignore unbound super.
|
||||
raise UseInferenceDefault
|
||||
|
||||
scope = node.scope()
|
||||
if not isinstance(scope, nodes.FunctionDef):
|
||||
# Ignore non-method uses of super.
|
||||
raise UseInferenceDefault
|
||||
if scope.type not in ("classmethod", "method"):
|
||||
# Not interested in staticmethods.
|
||||
raise UseInferenceDefault
|
||||
|
||||
cls = scoped_nodes.get_wrapping_class(scope)
|
||||
if not len(node.args):
|
||||
mro_pointer = cls
|
||||
# If we are in a classmethod, the interpreter will automatically fill
|
||||
# in the class as the second argument, not an instance.
|
||||
if scope.type == "classmethod":
|
||||
mro_type = cls
|
||||
else:
|
||||
mro_type = cls.instantiate_class()
|
||||
else:
|
||||
try:
|
||||
mro_pointer = next(node.args[0].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
mro_type = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
|
||||
# No way we could understand this.
|
||||
raise UseInferenceDefault
|
||||
|
||||
super_obj = objects.Super(
|
||||
mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
|
||||
)
|
||||
super_obj.parent = node
|
||||
return super_obj
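# Hedged example (added commentary, illustrative only): inside a method, a
# zero-argument ``super()`` call resolves to an ``objects.Super`` proxy bound
# to the enclosing class, e.g.
#
#     import astroid
#     node = astroid.extract_node("""
#     class A:
#         def f(self):
#             super()  #@
#     """)
#     print(next(node.infer()))  # expected: a Super object for class A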
|
||||
|
||||
|
||||
def _infer_getattr_args(node, context):
|
||||
if len(node.args) not in (2, 3):
|
||||
# Not a valid getattr call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
try:
|
||||
obj = next(node.args[0].infer(context=context))
|
||||
attr = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if obj is util.Uninferable or attr is util.Uninferable:
|
||||
# If one of the arguments is something we can't infer,
|
||||
# then also make the result of the getattr call something
|
||||
# which is unknown.
|
||||
return util.Uninferable, util.Uninferable
|
||||
|
||||
is_string = isinstance(attr, nodes.Const) and isinstance(
|
||||
attr.value, six.string_types
|
||||
)
|
||||
if not is_string:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return obj, attr.value
|
||||
|
||||
|
||||
def infer_getattr(node, context=None):
|
||||
"""Understand getattr calls
|
||||
|
||||
If one of the arguments is an Uninferable object, then the
|
||||
result will be an Uninferable object. Otherwise, the normal attribute
|
||||
lookup will be done.
|
||||
"""
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if (
|
||||
obj is util.Uninferable
|
||||
or attr is util.Uninferable
|
||||
or not hasattr(obj, "igetattr")
|
||||
):
|
||||
return util.Uninferable
|
||||
|
||||
try:
|
||||
return next(obj.igetattr(attr, context=context))
|
||||
except (StopIteration, InferenceError, AttributeInferenceError):
|
||||
if len(node.args) == 3:
|
||||
# Try to infer the default and return it instead.
|
||||
try:
|
||||
return next(node.args[2].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
raise UseInferenceDefault
|
||||
|
||||
|
||||
def infer_hasattr(node, context=None):
|
||||
"""Understand hasattr calls
|
||||
|
||||
This always guarantees three possible outcomes for calling
|
||||
hasattr: Const(False) when we are sure that the object
|
||||
doesn't have the intended attribute, Const(True) when
|
||||
we know that the object has the attribute and Uninferable
|
||||
when we are unsure of the outcome of the function call.
|
||||
"""
|
||||
try:
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if (
|
||||
obj is util.Uninferable
|
||||
or attr is util.Uninferable
|
||||
or not hasattr(obj, "getattr")
|
||||
):
|
||||
return util.Uninferable
|
||||
obj.getattr(attr, context=context)
|
||||
except UseInferenceDefault:
|
||||
# Can't infer something from this function call.
|
||||
return util.Uninferable
|
||||
except AttributeInferenceError:
|
||||
# Doesn't have it.
|
||||
return nodes.Const(False)
|
||||
return nodes.Const(True)
|
||||
|
||||
|
||||
def infer_callable(node, context=None):
|
||||
"""Understand callable calls
|
||||
|
||||
This follows Python's semantics, where an object
|
||||
is callable if it provides an attribute __call__,
|
||||
even though that attribute is something which can't be
|
||||
called.
|
||||
"""
|
||||
if len(node.args) != 1:
|
||||
# Invalid callable call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(inferred.callable())
|
||||
|
||||
|
||||
def infer_property(node, context=None):
|
||||
"""Understand `property` class
|
||||
|
||||
This only infers the output of `property`
|
||||
call, not the arguments themselves.
|
||||
"""
|
||||
if len(node.args) < 1:
|
||||
# Invalid property call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
getter = node.args[0]
|
||||
try:
|
||||
inferred = next(getter.infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not isinstance(inferred, (nodes.FunctionDef, nodes.Lambda)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
return objects.Property(
|
||||
function=inferred,
|
||||
name=inferred.name,
|
||||
doc=getattr(inferred, "doc", None),
|
||||
lineno=node.lineno,
|
||||
parent=node,
|
||||
col_offset=node.col_offset,
|
||||
)
|
||||
|
||||
|
||||
def infer_bool(node, context=None):
|
||||
"""Understand bool calls."""
|
||||
if len(node.args) > 1:
|
||||
# Invalid bool call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not node.args:
|
||||
return nodes.Const(False)
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
|
||||
bool_value = inferred.bool_value(context=context)
|
||||
if bool_value is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(bool_value)
|
||||
|
||||
|
||||
def infer_type(node, context=None):
|
||||
"""Understand the one-argument form of *type*."""
|
||||
if len(node.args) != 1:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return helpers.object_type(node.args[0], context)
|
||||
|
||||
|
||||
def infer_slice(node, context=None):
|
||||
"""Understand `slice` calls."""
|
||||
args = node.args
|
||||
if not 0 < len(args) <= 3:
|
||||
raise UseInferenceDefault
|
||||
|
||||
infer_func = partial(helpers.safe_infer, context=context)
|
||||
args = [infer_func(arg) for arg in args]
|
||||
for arg in args:
|
||||
if not arg or arg is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg, nodes.Const):
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg.value, (type(None), int)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(args) < 3:
|
||||
# Make sure we have 3 arguments.
|
||||
args.extend([None] * (3 - len(args)))
|
||||
|
||||
slice_node = nodes.Slice(
|
||||
lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
|
||||
)
|
||||
slice_node.postinit(*args)
|
||||
return slice_node
|
||||
|
||||
|
||||
def _infer_object__new__decorator(node, context=None):
|
||||
# Instantiate class immediately
|
||||
# since that's what @object.__new__ does
|
||||
return iter((node.instantiate_class(),))
|
||||
|
||||
|
||||
def _infer_object__new__decorator_check(node):
|
||||
"""Predicate before inference_tip
|
||||
|
||||
Check if the given ClassDef has an @object.__new__ decorator
|
||||
"""
|
||||
if not node.decorators:
|
||||
return False
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if isinstance(decorator, nodes.Attribute):
|
||||
if decorator.as_string() == OBJECT_DUNDER_NEW:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def infer_issubclass(callnode, context=None):
|
||||
"""Infer issubclass() calls
|
||||
|
||||
:param nodes.Call callnode: an `issubclass` call
|
||||
:param InferenceContext: the context for the inference
|
||||
:rtype nodes.Const: Boolean Const value of the `issubclass` call
|
||||
:raises UseInferenceDefault: If the node cannot be inferred
|
||||
"""
|
||||
call = arguments.CallSite.from_call(callnode, context=context)
|
||||
if call.keyword_arguments:
|
||||
# issubclass doesn't support keyword arguments
|
||||
raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
|
||||
if len(call.positional_arguments) != 2:
|
||||
raise UseInferenceDefault(
|
||||
"Expected two arguments, got {count}".format(
|
||||
count=len(call.positional_arguments)
|
||||
)
|
||||
)
|
||||
# The left hand argument is the obj to be checked
|
||||
obj_node, class_or_tuple_node = call.positional_arguments
|
||||
|
||||
try:
|
||||
obj_type = next(obj_node.infer(context=context))
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
if not isinstance(obj_type, nodes.ClassDef):
|
||||
raise UseInferenceDefault("TypeError: arg 1 must be class")
|
||||
|
||||
# The right hand argument is the class(es) that the given
|
||||
# object is to be checked against.
|
||||
try:
|
||||
class_container = _class_or_tuple_to_container(
|
||||
class_or_tuple_node, context=context
|
||||
)
|
||||
except InferenceError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
try:
|
||||
issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
|
||||
except AstroidTypeError as exc:
|
||||
raise UseInferenceDefault("TypeError: " + str(exc)) from exc
|
||||
except MroError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
return nodes.Const(issubclass_bool)
|
||||
|
||||
|
||||
def infer_isinstance(callnode, context=None):
|
||||
"""Infer isinstance calls
|
||||
|
||||
:param nodes.Call callnode: an isinstance call
|
||||
:param InferenceContext: context for call
|
||||
(currently unused but is a common interface for inference)
|
||||
:rtype nodes.Const: Boolean Const value of isinstance call
|
||||
|
||||
:raises UseInferenceDefault: If the node cannot be inferred
|
||||
"""
|
||||
call = arguments.CallSite.from_call(callnode, context=context)
|
||||
if call.keyword_arguments:
|
||||
# isinstance doesn't support keyword arguments
|
||||
raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
|
||||
if len(call.positional_arguments) != 2:
|
||||
raise UseInferenceDefault(
|
||||
"Expected two arguments, got {count}".format(
|
||||
count=len(call.positional_arguments)
|
||||
)
|
||||
)
|
||||
# The left hand argument is the obj to be checked
|
||||
obj_node, class_or_tuple_node = call.positional_arguments
|
||||
# The right hand argument is the class(es) that the given
|
||||
# obj is to be checked as an instance of
|
||||
try:
|
||||
class_container = _class_or_tuple_to_container(
|
||||
class_or_tuple_node, context=context
|
||||
)
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
|
||||
except AstroidTypeError as exc:
|
||||
raise UseInferenceDefault("TypeError: " + str(exc))
|
||||
except MroError as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
if isinstance_bool is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
return nodes.Const(isinstance_bool)
|
||||
|
||||
|
||||
def _class_or_tuple_to_container(node, context=None):
|
||||
# Move inference results into a container
|
||||
# to simplify later logic
|
||||
# raises InferenceError if any of the inferences fall through
|
||||
node_infer = next(node.infer(context=context))
|
||||
# arg2 MUST be a type or a TUPLE of types
|
||||
# for isinstance
|
||||
if isinstance(node_infer, nodes.Tuple):
|
||||
class_container = [
|
||||
next(node.infer(context=context)) for node in node_infer.elts
|
||||
]
|
||||
class_container = [
|
||||
klass_node for klass_node in class_container if klass_node is not None
|
||||
]
|
||||
else:
|
||||
class_container = [node_infer]
|
||||
return class_container
|
||||
|
||||
|
||||
def infer_len(node, context=None):
|
||||
"""Infer length calls
|
||||
|
||||
:param nodes.Call node: len call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const node with the inferred length, if possible
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
|
||||
if len(call.positional_arguments) != 1:
|
||||
raise UseInferenceDefault(
|
||||
"TypeError: len() must take exactly one argument "
|
||||
"({len}) given".format(len=len(call.positional_arguments))
|
||||
)
|
||||
[argument_node] = call.positional_arguments
|
||||
try:
|
||||
return nodes.Const(helpers.object_len(argument_node, context=context))
|
||||
except (AstroidTypeError, InferenceError) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
|
||||
def infer_str(node, context=None):
|
||||
"""Infer str() calls
|
||||
|
||||
:param nodes.Call node: str() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const containing an empty string
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
|
||||
try:
|
||||
return nodes.Const("")
|
||||
except (AstroidTypeError, InferenceError) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
|
||||
def infer_int(node, context=None):
|
||||
"""Infer int() calls
|
||||
|
||||
:param nodes.Call node: int() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Const: a Const containing the integer value of the int() call
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
|
||||
|
||||
if call.positional_arguments:
|
||||
try:
|
||||
first_value = next(call.positional_arguments[0].infer(context=context))
|
||||
except (InferenceError, StopIteration) as exc:
|
||||
raise UseInferenceDefault(str(exc)) from exc
|
||||
|
||||
if first_value is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if isinstance(first_value, nodes.Const) and isinstance(
|
||||
first_value.value, (int, str)
|
||||
):
|
||||
try:
|
||||
actual_value = int(first_value.value)
|
||||
except ValueError:
|
||||
return nodes.Const(0)
|
||||
return nodes.Const(actual_value)
|
||||
|
||||
return nodes.Const(0)
|
||||
|
||||
|
||||
def infer_dict_fromkeys(node, context=None):
|
||||
"""Infer dict.fromkeys
|
||||
|
||||
:param nodes.Call node: dict.fromkeys() call to infer
|
||||
:param context.InferenceContext: node context
|
||||
:rtype nodes.Dict:
|
||||
a Dictionary containing the values that astroid was able to infer.
|
||||
In case the inference failed for any reason, an empty dictionary
|
||||
will be inferred instead.
|
||||
"""
|
||||
|
||||
def _build_dict_with_elements(elements):
|
||||
new_node = nodes.Dict(
|
||||
col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
|
||||
)
|
||||
new_node.postinit(elements)
|
||||
return new_node
|
||||
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
if call.keyword_arguments:
|
||||
raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
|
||||
if len(call.positional_arguments) not in {1, 2}:
|
||||
raise UseInferenceDefault(
|
||||
"TypeError: Needs between 1 and 2 positional arguments"
|
||||
)
|
||||
|
||||
default = nodes.Const(None)
|
||||
values = call.positional_arguments[0]
|
||||
try:
|
||||
inferred_values = next(values.infer(context=context))
|
||||
except InferenceError:
|
||||
return _build_dict_with_elements([])
|
||||
if inferred_values is util.Uninferable:
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
# Limit to a couple of potential values, as this can become pretty complicated
|
||||
accepted_iterable_elements = (nodes.Const,)
|
||||
if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
|
||||
elements = inferred_values.elts
|
||||
for element in elements:
|
||||
if not isinstance(element, accepted_iterable_elements):
|
||||
# Fallback to an empty dict
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
elements_with_value = [(element, default) for element in elements]
|
||||
return _build_dict_with_elements(elements_with_value)
|
||||
|
||||
elif isinstance(inferred_values, nodes.Const) and isinstance(
|
||||
inferred_values.value, (str, bytes)
|
||||
):
|
||||
elements = [
|
||||
(nodes.Const(element), default) for element in inferred_values.value
|
||||
]
|
||||
return _build_dict_with_elements(elements)
|
||||
elif isinstance(inferred_values, nodes.Dict):
|
||||
keys = inferred_values.itered()
|
||||
for key in keys:
|
||||
if not isinstance(key, accepted_iterable_elements):
|
||||
# Fallback to an empty dict
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
elements_with_value = [(element, default) for element in keys]
|
||||
return _build_dict_with_elements(elements_with_value)
|
||||
|
||||
# Fallback to an empty dictionary
|
||||
return _build_dict_with_elements([])
|
||||
|
||||
|
||||
# Builtins inference
|
||||
register_builtin_transform(infer_bool, "bool")
|
||||
register_builtin_transform(infer_super, "super")
|
||||
register_builtin_transform(infer_callable, "callable")
|
||||
register_builtin_transform(infer_property, "property")
|
||||
register_builtin_transform(infer_getattr, "getattr")
|
||||
register_builtin_transform(infer_hasattr, "hasattr")
|
||||
register_builtin_transform(infer_tuple, "tuple")
|
||||
register_builtin_transform(infer_set, "set")
|
||||
register_builtin_transform(infer_list, "list")
|
||||
register_builtin_transform(infer_dict, "dict")
|
||||
register_builtin_transform(infer_frozenset, "frozenset")
|
||||
register_builtin_transform(infer_type, "type")
|
||||
register_builtin_transform(infer_slice, "slice")
|
||||
register_builtin_transform(infer_isinstance, "isinstance")
|
||||
register_builtin_transform(infer_issubclass, "issubclass")
|
||||
register_builtin_transform(infer_len, "len")
|
||||
register_builtin_transform(infer_str, "str")
|
||||
register_builtin_transform(infer_int, "int")
|
||||
register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
|
||||
|
||||
|
||||
# Infer object.__new__ calls
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
inference_tip(_infer_object__new__decorator),
|
||||
_infer_object__new__decorator_check,
|
||||
)
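# Hedged demo (added for illustration; not part of upstream astroid). Runs
# only when this file is executed directly and assumes the public astroid
# package is importable; it exercises a few of the builtin transforms that
# were registered above.
if __name__ == "__main__":
    import astroid as _astroid_demo

    for _snippet in ("len([1, 2, 3])", "isinstance(1, int)", "bool([])"):
        _node = _astroid_demo.extract_node(_snippet)
        # Each call should now infer to a constant rather than Uninferable.
        print(_snippet, "->", next(_node.infer()).as_string())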
|
||||
@@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _collections_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
class defaultdict(dict):
|
||||
default_factory = None
|
||||
def __missing__(self, key): pass
|
||||
def __getitem__(self, key): return default_factory
|
||||
|
||||
"""
|
||||
+ _deque_mock()
|
||||
+ _ordered_dict_mock()
|
||||
)
|
||||
|
||||
|
||||
def _deque_mock():
|
||||
base_deque_class = """
|
||||
class deque(object):
|
||||
maxlen = 0
|
||||
def __init__(self, iterable=None, maxlen=None):
|
||||
self.iterable = iterable or []
|
||||
def append(self, x): pass
|
||||
def appendleft(self, x): pass
|
||||
def clear(self): pass
|
||||
def count(self, x): return 0
|
||||
def extend(self, iterable): pass
|
||||
def extendleft(self, iterable): pass
|
||||
def pop(self): return self.iterable[0]
|
||||
def popleft(self): return self.iterable[0]
|
||||
def remove(self, value): pass
|
||||
def reverse(self): return reversed(self.iterable)
|
||||
def rotate(self, n=1): return self
|
||||
def __iter__(self): return self
|
||||
def __reversed__(self): return self.iterable[::-1]
|
||||
def __getitem__(self, index): return self.iterable[index]
|
||||
def __setitem__(self, index, value): pass
|
||||
def __delitem__(self, index): pass
|
||||
def __bool__(self): return bool(self.iterable)
|
||||
def __nonzero__(self): return bool(self.iterable)
|
||||
def __contains__(self, o): return o in self.iterable
|
||||
def __len__(self): return len(self.iterable)
|
||||
def __copy__(self): return deque(self.iterable)
|
||||
def copy(self): return deque(self.iterable)
|
||||
def index(self, x, start=0, end=0): return 0
|
||||
def insert(self, x, i): pass
|
||||
def __add__(self, other): pass
|
||||
def __iadd__(self, other): pass
|
||||
def __mul__(self, other): pass
|
||||
def __imul__(self, other): pass
|
||||
def __rmul__(self, other): pass"""
|
||||
return base_deque_class
|
||||
|
||||
|
||||
def _ordered_dict_mock():
|
||||
base_ordered_dict_class = """
|
||||
class OrderedDict(dict):
|
||||
def __reversed__(self): return self[::-1]
|
||||
def move_to_end(self, key, last=False): pass"""
|
||||
return base_ordered_dict_class
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)
|
||||
@@ -0,0 +1,26 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
import astroid
|
||||
|
||||
PY37 = sys.version_info >= (3, 7)
|
||||
|
||||
if PY37:
|
||||
# Since Python 3.7, hashing methods are added
|
||||
# dynamically to globals()
|
||||
|
||||
def _re_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
_Method = namedtuple('_Method', 'name ident salt_chars total_size')
|
||||
|
||||
METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
|
||||
METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
|
||||
METHOD_BLOWFISH = _Method('BLOWFISH', 2, 'b', 22)
|
||||
METHOD_MD5 = _Method('MD5', '1', 8, 34)
|
||||
METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
|
||||
"""
|
||||
)
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "crypt", _re_transform)
|
||||
179
venv/lib/python3.8/site-packages/astroid/brain/brain_curses.py
Normal file
@@ -0,0 +1,179 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import astroid
|
||||
|
||||
|
||||
def _curses_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
A_ALTCHARSET = 1
|
||||
A_BLINK = 1
|
||||
A_BOLD = 1
|
||||
A_DIM = 1
|
||||
A_INVIS = 1
|
||||
A_ITALIC = 1
|
||||
A_NORMAL = 1
|
||||
A_PROTECT = 1
|
||||
A_REVERSE = 1
|
||||
A_STANDOUT = 1
|
||||
A_UNDERLINE = 1
|
||||
A_HORIZONTAL = 1
|
||||
A_LEFT = 1
|
||||
A_LOW = 1
|
||||
A_RIGHT = 1
|
||||
A_TOP = 1
|
||||
A_VERTICAL = 1
|
||||
A_CHARTEXT = 1
|
||||
A_ATTRIBUTES = 1
|
||||
A_CHARTEXT = 1
|
||||
A_COLOR = 1
|
||||
KEY_MIN = 1
|
||||
KEY_BREAK = 1
|
||||
KEY_DOWN = 1
|
||||
KEY_UP = 1
|
||||
KEY_LEFT = 1
|
||||
KEY_RIGHT = 1
|
||||
KEY_HOME = 1
|
||||
KEY_BACKSPACE = 1
|
||||
KEY_F0 = 1
|
||||
KEY_Fn = 1
|
||||
KEY_DL = 1
|
||||
KEY_IL = 1
|
||||
KEY_DC = 1
|
||||
KEY_IC = 1
|
||||
KEY_EIC = 1
|
||||
KEY_CLEAR = 1
|
||||
KEY_EOS = 1
|
||||
KEY_EOL = 1
|
||||
KEY_SF = 1
|
||||
KEY_SR = 1
|
||||
KEY_NPAGE = 1
|
||||
KEY_PPAGE = 1
|
||||
KEY_STAB = 1
|
||||
KEY_CTAB = 1
|
||||
KEY_CATAB = 1
|
||||
KEY_ENTER = 1
|
||||
KEY_SRESET = 1
|
||||
KEY_RESET = 1
|
||||
KEY_PRINT = 1
|
||||
KEY_LL = 1
|
||||
KEY_A1 = 1
|
||||
KEY_A3 = 1
|
||||
KEY_B2 = 1
|
||||
KEY_C1 = 1
|
||||
KEY_C3 = 1
|
||||
KEY_BTAB = 1
|
||||
KEY_BEG = 1
|
||||
KEY_CANCEL = 1
|
||||
KEY_CLOSE = 1
|
||||
KEY_COMMAND = 1
|
||||
KEY_COPY = 1
|
||||
KEY_CREATE = 1
|
||||
KEY_END = 1
|
||||
KEY_EXIT = 1
|
||||
KEY_FIND = 1
|
||||
KEY_HELP = 1
|
||||
KEY_MARK = 1
|
||||
KEY_MESSAGE = 1
|
||||
KEY_MOVE = 1
|
||||
KEY_NEXT = 1
|
||||
KEY_OPEN = 1
|
||||
KEY_OPTIONS = 1
|
||||
KEY_PREVIOUS = 1
|
||||
KEY_REDO = 1
|
||||
KEY_REFERENCE = 1
|
||||
KEY_REFRESH = 1
|
||||
KEY_REPLACE = 1
|
||||
KEY_RESTART = 1
|
||||
KEY_RESUME = 1
|
||||
KEY_SAVE = 1
|
||||
KEY_SBEG = 1
|
||||
KEY_SCANCEL = 1
|
||||
KEY_SCOMMAND = 1
|
||||
KEY_SCOPY = 1
|
||||
KEY_SCREATE = 1
|
||||
KEY_SDC = 1
|
||||
KEY_SDL = 1
|
||||
KEY_SELECT = 1
|
||||
KEY_SEND = 1
|
||||
KEY_SEOL = 1
|
||||
KEY_SEXIT = 1
|
||||
KEY_SFIND = 1
|
||||
KEY_SHELP = 1
|
||||
KEY_SHOME = 1
|
||||
KEY_SIC = 1
|
||||
KEY_SLEFT = 1
|
||||
KEY_SMESSAGE = 1
|
||||
KEY_SMOVE = 1
|
||||
KEY_SNEXT = 1
|
||||
KEY_SOPTIONS = 1
|
||||
KEY_SPREVIOUS = 1
|
||||
KEY_SPRINT = 1
|
||||
KEY_SREDO = 1
|
||||
KEY_SREPLACE = 1
|
||||
KEY_SRIGHT = 1
|
||||
KEY_SRSUME = 1
|
||||
KEY_SSAVE = 1
|
||||
KEY_SSUSPEND = 1
|
||||
KEY_SUNDO = 1
|
||||
KEY_SUSPEND = 1
|
||||
KEY_UNDO = 1
|
||||
KEY_MOUSE = 1
|
||||
KEY_RESIZE = 1
|
||||
KEY_MAX = 1
|
||||
ACS_BBSS = 1
|
||||
ACS_BLOCK = 1
|
||||
ACS_BOARD = 1
|
||||
ACS_BSBS = 1
|
||||
ACS_BSSB = 1
|
||||
ACS_BSSS = 1
|
||||
ACS_BTEE = 1
|
||||
ACS_BULLET = 1
|
||||
ACS_CKBOARD = 1
|
||||
ACS_DARROW = 1
|
||||
ACS_DEGREE = 1
|
||||
ACS_DIAMOND = 1
|
||||
ACS_GEQUAL = 1
|
||||
ACS_HLINE = 1
|
||||
ACS_LANTERN = 1
|
||||
ACS_LARROW = 1
|
||||
ACS_LEQUAL = 1
|
||||
ACS_LLCORNER = 1
|
||||
ACS_LRCORNER = 1
|
||||
ACS_LTEE = 1
|
||||
ACS_NEQUAL = 1
|
||||
ACS_PI = 1
|
||||
ACS_PLMINUS = 1
|
||||
ACS_PLUS = 1
|
||||
ACS_RARROW = 1
|
||||
ACS_RTEE = 1
|
||||
ACS_S1 = 1
|
||||
ACS_S3 = 1
|
||||
ACS_S7 = 1
|
||||
ACS_S9 = 1
|
||||
ACS_SBBS = 1
|
||||
ACS_SBSB = 1
|
||||
ACS_SBSS = 1
|
||||
ACS_SSBB = 1
|
||||
ACS_SSBS = 1
|
||||
ACS_SSSB = 1
|
||||
ACS_SSSS = 1
|
||||
ACS_STERLING = 1
|
||||
ACS_TTEE = 1
|
||||
ACS_UARROW = 1
|
||||
ACS_ULCORNER = 1
|
||||
ACS_URCORNER = 1
|
||||
ACS_VLINE = 1
|
||||
COLOR_BLACK = 1
|
||||
COLOR_BLUE = 1
|
||||
COLOR_CYAN = 1
|
||||
COLOR_GREEN = 1
|
||||
COLOR_MAGENTA = 1
|
||||
COLOR_RED = 1
|
||||
COLOR_WHITE = 1
|
||||
COLOR_YELLOW = 1
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)
|
||||
@@ -0,0 +1,50 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
"""
|
||||
Astroid hook for the dataclasses library
|
||||
"""
|
||||
|
||||
import astroid
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
DATACLASSES_DECORATORS = frozenset(("dataclasses.dataclass", "dataclass"))
|
||||
|
||||
|
||||
def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS):
|
||||
"""Return True if a decorated node has a `dataclass` decorator applied."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator_attribute in node.decorators.nodes:
|
||||
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
|
||||
decorator_attribute = decorator_attribute.func
|
||||
if decorator_attribute.as_string() in decorator_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def dataclass_transform(node):
|
||||
"""Rewrite a dataclass to be easily understood by pylint"""
|
||||
|
||||
for assign_node in node.body:
|
||||
if not isinstance(assign_node, (astroid.AnnAssign, astroid.Assign)):
|
||||
continue
|
||||
|
||||
targets = (
|
||||
assign_node.targets
|
||||
if hasattr(assign_node, "targets")
|
||||
else [assign_node.target]
|
||||
)
|
||||
for target in targets:
|
||||
rhs_node = astroid.Unknown(
|
||||
lineno=assign_node.lineno,
|
||||
col_offset=assign_node.col_offset,
|
||||
parent=assign_node,
|
||||
)
|
||||
node.instance_attrs[target.name] = [rhs_node]
|
||||
node.locals[target.name] = [rhs_node]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.ClassDef, dataclass_transform, is_decorated_with_dataclass
|
||||
)
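# Hedged usage sketch (illustrative only, not upstream code): after the
# transform above, fields declared on a dataclass body are also visible as
# instance attributes, e.g.
#
#     import astroid
#     klass = astroid.extract_node("""
#     import dataclasses
#     @dataclasses.dataclass
#     class Point:
#         x: int = 0
#     Point  #@
#     """)
#     inferred = next(klass.infer())
#     print("x" in inferred.instance_attrs)  # expected: True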
|
||||
@@ -0,0 +1,28 @@
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 raylu <lurayl@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for dateutil"""
|
||||
|
||||
import textwrap
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def dateutil_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import datetime
|
||||
def parse(timestr, parserinfo=None, **kwargs):
|
||||
return datetime.datetime()
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)
|
||||
@@ -0,0 +1,51 @@
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import collections.abc
import sys

import astroid


def _clone_node_with_lineno(node, parent, lineno):
    cls = node.__class__
    other_fields = node._other_fields
    _astroid_fields = node._astroid_fields
    init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
    postinit_params = {param: getattr(node, param) for param in _astroid_fields}
    if other_fields:
        init_params.update({param: getattr(node, param) for param in other_fields})
    new_node = cls(**init_params)
    if hasattr(node, "postinit") and _astroid_fields:
        for param, child in postinit_params.items():
            if child and not isinstance(child, collections.abc.Sequence):
                cloned_child = _clone_node_with_lineno(
                    node=child, lineno=new_node.lineno, parent=new_node
                )
                postinit_params[param] = cloned_child
        new_node.postinit(**postinit_params)
    return new_node


def _transform_formatted_value(node):
    if node.value and node.value.lineno == 1:
        if node.lineno != node.value.lineno:
            new_node = astroid.FormattedValue(
                lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
            )
            new_value = _clone_node_with_lineno(
                node=node.value, lineno=node.lineno, parent=new_node
            )
            new_node.postinit(value=new_value, format_spec=node.format_spec)
            return new_node


if sys.version_info[:2] >= (3, 6):
    # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
    # The problem is that FormattedValue.value, which is a Name node,
    # has wrong line numbers, usually 1. This creates problems for pylint,
    # which expects correct line numbers for things such as message control.
    astroid.MANAGER.register_transform(
        astroid.FormattedValue, _transform_formatted_value
    )
@@ -0,0 +1,159 @@
|
||||
# Copyright (c) 2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
|
||||
"""Astroid hooks for understanding functools library module."""
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
|
||||
import astroid
|
||||
from astroid import arguments
|
||||
from astroid import BoundMethod
|
||||
from astroid import extract_node
|
||||
from astroid import helpers
|
||||
from astroid.interpreter import objectmodel
|
||||
from astroid import MANAGER
|
||||
from astroid import objects
|
||||
|
||||
|
||||
LRU_CACHE = "functools.lru_cache"
|
||||
|
||||
|
||||
class LruWrappedModel(objectmodel.FunctionModel):
|
||||
"""Special attribute model for functions decorated with functools.lru_cache.
|
||||
|
||||
The said decorator patches, at decoration time, some functions onto
the decorated function.
|
||||
"""
|
||||
|
||||
@property
|
||||
def attr___wrapped__(self):
|
||||
return self._instance
|
||||
|
||||
@property
|
||||
def attr_cache_info(self):
|
||||
cache_info = extract_node(
|
||||
"""
|
||||
from functools import _CacheInfo
|
||||
_CacheInfo(0, 0, 0, 0)
|
||||
"""
|
||||
)
|
||||
|
||||
class CacheInfoBoundMethod(BoundMethod):
|
||||
def infer_call_result(self, caller, context=None):
|
||||
yield helpers.safe_infer(cache_info)
|
||||
|
||||
return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
|
||||
|
||||
@property
|
||||
def attr_cache_clear(self):
|
||||
node = extract_node("""def cache_clear(self): pass""")
|
||||
return BoundMethod(proxy=node, bound=self._instance.parent.scope())
|
||||
|
||||
|
||||
def _transform_lru_cache(node, context=None):
|
||||
# TODO: this is not ideal, since the node should be immutable,
|
||||
# but due to https://github.com/PyCQA/astroid/issues/354,
|
||||
# there's not much we can do now.
|
||||
# Replacing the node would work partially, because,
|
||||
# in pylint, the old node would still be available, leading
|
||||
# to spurious false positives.
|
||||
node.special_attributes = LruWrappedModel()(node)
|
||||
return
|
||||
|
||||
|
||||
def _functools_partial_inference(node, context=None):
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
number_of_positional = len(call.positional_arguments)
|
||||
if number_of_positional < 1:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"functools.partial takes at least one argument"
|
||||
)
|
||||
if number_of_positional == 1 and not call.keyword_arguments:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"functools.partial needs at least to have some filled arguments"
|
||||
)
|
||||
|
||||
partial_function = call.positional_arguments[0]
|
||||
try:
|
||||
inferred_wrapped_function = next(partial_function.infer(context=context))
|
||||
except astroid.InferenceError as exc:
|
||||
raise astroid.UseInferenceDefault from exc
|
||||
if inferred_wrapped_function is astroid.Uninferable:
|
||||
raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
|
||||
if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
|
||||
raise astroid.UseInferenceDefault("The wrapped function is not a function")
|
||||
|
||||
# Determine if the passed keywords into the callsite are supported
|
||||
# by the wrapped function.
|
||||
function_parameters = chain(
|
||||
inferred_wrapped_function.args.args or (),
|
||||
inferred_wrapped_function.args.posonlyargs or (),
|
||||
inferred_wrapped_function.args.kwonlyargs or (),
|
||||
)
|
||||
parameter_names = set(
|
||||
param.name
|
||||
for param in function_parameters
|
||||
if isinstance(param, astroid.AssignName)
|
||||
)
|
||||
if set(call.keyword_arguments) - parameter_names:
|
||||
raise astroid.UseInferenceDefault(
|
||||
"wrapped function received unknown parameters"
|
||||
)
|
||||
|
||||
partial_function = objects.PartialFunction(
|
||||
call,
|
||||
name=inferred_wrapped_function.name,
|
||||
doc=inferred_wrapped_function.doc,
|
||||
lineno=inferred_wrapped_function.lineno,
|
||||
col_offset=inferred_wrapped_function.col_offset,
|
||||
parent=inferred_wrapped_function.parent,
|
||||
)
|
||||
partial_function.postinit(
|
||||
args=inferred_wrapped_function.args,
|
||||
body=inferred_wrapped_function.body,
|
||||
decorators=inferred_wrapped_function.decorators,
|
||||
returns=inferred_wrapped_function.returns,
|
||||
type_comment_returns=inferred_wrapped_function.type_comment_returns,
|
||||
type_comment_args=inferred_wrapped_function.type_comment_args,
|
||||
)
|
||||
return iter((partial_function,))
|
||||
|
||||
|
||||
def _looks_like_lru_cache(node):
|
||||
"""Check if the given function node is decorated with lru_cache."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, astroid.Call):
|
||||
continue
|
||||
if _looks_like_functools_member(decorator, "lru_cache"):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _looks_like_functools_member(node, member):
|
||||
"""Check if the given Call node is a functools.partial call"""
|
||||
if isinstance(node.func, astroid.Name):
|
||||
return node.func.name == member
|
||||
elif isinstance(node.func, astroid.Attribute):
|
||||
return (
|
||||
node.func.attrname == member
|
||||
and isinstance(node.func.expr, astroid.Name)
|
||||
and node.func.expr.name == "functools"
|
||||
)
|
||||
|
||||
|
||||
_looks_like_partial = partial(_looks_like_functools_member, member="partial")
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
|
||||
)
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.Call,
|
||||
astroid.inference_tip(_functools_partial_inference),
|
||||
_looks_like_partial,
|
||||
)
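if __name__ == "__main__":
    # Illustrative sketch only -- not part of upstream astroid. It shows the
    # effect of the ``functools.partial`` inference tip registered above: a
    # partial() call infers to a PartialFunction that keeps the name and
    # signature of the wrapped function.
    import astroid

    partial_call = astroid.extract_node(
        """
        import functools

        def add(a, b):
            return a + b

        functools.partial(add, 1)  #@
        """
    )
    inferred = next(partial_call.infer())
    print(type(inferred).__name__, inferred.name)  # expected: PartialFunction add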
253
venv/lib/python3.8/site-packages/astroid/brain/brain_gi.py
Normal file
@@ -0,0 +1,253 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Cole Robinson <crobinso@redhat.com>
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Giuseppe Scrivano <gscrivan@redhat.com>
|
||||
# Copyright (c) 2018 Christoph Reiter <reiter.christoph@gmail.com>
|
||||
# Copyright (c) 2019 Philipp Hörist <philipp@hoerist.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from astroid import MANAGER, AstroidBuildingError, nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r"^[A-Za-z_]\w*$"
|
||||
|
||||
_special_methods = frozenset(
|
||||
{
|
||||
"__lt__",
|
||||
"__le__",
|
||||
"__eq__",
|
||||
"__ne__",
|
||||
"__ge__",
|
||||
"__gt__",
|
||||
"__iter__",
|
||||
"__getitem__",
|
||||
"__setitem__",
|
||||
"__delitem__",
|
||||
"__len__",
|
||||
"__bool__",
|
||||
"__nonzero__",
|
||||
"__next__",
|
||||
"__str__",
|
||||
"__len__",
|
||||
"__contains__",
|
||||
"__enter__",
|
||||
"__exit__",
|
||||
"__repr__",
|
||||
"__getattr__",
|
||||
"__setattr__",
|
||||
"__delattr__",
|
||||
"__del__",
|
||||
"__hash__",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__") and name not in _special_methods:
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
|
||||
functions[name] = obj
|
||||
elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
|
||||
methods[name] = obj
|
||||
elif (
|
||||
str(obj).startswith("<flags")
|
||||
or str(obj).startswith("<enum ")
|
||||
or str(obj).startswith("<GType ")
|
||||
or inspect.isdatadescriptor(obj)
|
||||
):
|
||||
constants[name] = 0
|
||||
elif isinstance(obj, (int, str)):
|
||||
constants[name] = obj
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += "# %s constants\n\n" % parent.__name__
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if isinstance(val, str):
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += "%s = %s\n" % (name, strval)
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += "# %s functions\n\n" % parent.__name__
|
||||
for name in sorted(functions):
|
||||
ret += "def %s(*args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += "# %s methods\n\n" % parent.__name__
|
||||
for name in sorted(methods):
|
||||
ret += "def %s(self, *args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += "# %s classes\n\n" % parent.__name__
|
||||
for name, obj in sorted(classes.items()):
|
||||
base = "object"
|
||||
if issubclass(obj, Exception):
|
||||
base = "Exception"
|
||||
ret += "class %s(%s):\n" % (name, base)
|
||||
|
||||
classret = _gi_build_stub(obj)
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith("gi.repository."):
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so we treat these modules as optional.
|
||||
if modname == "gi.repository.GLib":
|
||||
optional_modnames.append("gi._glib")
|
||||
elif modname == "gi.repository.GObject":
|
||||
optional_modnames.append("gi._gobject")
|
||||
|
||||
try:
|
||||
modcode = ""
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
with warnings.catch_warnings():
|
||||
# Just inspecting the code can raise gi deprecation
|
||||
# warnings, so ignore them.
|
||||
try:
|
||||
from gi import PyGIDeprecationWarning, PyGIWarning
|
||||
|
||||
warnings.simplefilter("ignore", PyGIDeprecationWarning)
|
||||
warnings.simplefilter("ignore", PyGIWarning)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
return astng
|
||||
|
||||
|
||||
def _looks_like_require_version(node):
|
||||
# Return whether this looks like a call to gi.require_version(<name>, <version>)
|
||||
# Only accept function calls with two constant arguments
|
||||
if len(node.args) != 2:
|
||||
return False
|
||||
|
||||
if not all(isinstance(arg, nodes.Const) for arg in node.args):
|
||||
return False
|
||||
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
if func.attrname != "require_version":
|
||||
return False
|
||||
if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == "require_version"
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _register_require_version(node):
|
||||
# Load the gi.require_version locally
|
||||
try:
|
||||
import gi
|
||||
|
||||
gi.require_version(node.args[0].value, node.args[1].value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return node
|
||||
|
||||
|
||||
MANAGER.register_failed_import_hook(_import_gi_module)
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, _register_require_version, _looks_like_require_version
|
||||
)
|
||||
@@ -0,0 +1,69 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 David Poirier <david-poirier-csn@users.noreply.github.com>
|
||||
# Copyright (c) 2018 wgehalo <wgehalo@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
import astroid
|
||||
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
|
||||
|
||||
def _hashlib_transform():
|
||||
signature = "value=''"
|
||||
template = """
|
||||
class %(name)s(object):
|
||||
def __init__(self, %(signature)s): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
"""
|
||||
algorithms_with_signature = dict.fromkeys(
|
||||
["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
|
||||
)
|
||||
if PY36:
|
||||
blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
new_algorithms = dict.fromkeys(
|
||||
["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
|
||||
signature,
|
||||
)
|
||||
algorithms_with_signature.update(new_algorithms)
|
||||
algorithms_with_signature.update(
|
||||
{"blake2b": blake2b_signature, "blake2s": blake2s_signature}
|
||||
)
|
||||
classes = "".join(
|
||||
template
|
||||
% {
|
||||
"name": hashfunc,
|
||||
"digest": 'b""' if six.PY3 else '""',
|
||||
"signature": signature,
|
||||
}
|
||||
for hashfunc, signature in algorithms_with_signature.items()
|
||||
)
|
||||
return astroid.parse(classes)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
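if __name__ == "__main__":
    # Illustrative sketch only -- not part of upstream astroid. It shows the
    # effect of the hashlib extender above: hash constructors and their
    # methods become inferable even though the real implementations are in C.
    import astroid

    call = astroid.extract_node(
        """
        import hashlib
        hashlib.md5(b"data").hexdigest()  #@
        """
    )
    inferred = next(call.infer())
    print(repr(inferred.value))  # expected: '' (the stubbed hexdigest result)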
211
venv/lib/python3.8/site-packages/astroid/brain/brain_http.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# Copyright (c) 2018-2019 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid brain hints for some of the `http` module."""
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def _http_transform():
|
||||
code = textwrap.dedent(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
_HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description')
|
||||
|
||||
class HTTPStatus:
|
||||
|
||||
@property
|
||||
def phrase(self):
|
||||
return ""
|
||||
@property
|
||||
def value(self):
|
||||
return 0
|
||||
@property
|
||||
def description(self):
|
||||
return ""
|
||||
|
||||
# informational
|
||||
CONTINUE = _HTTPStatus(100, 'Continue', 'Request received, please continue')
|
||||
SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols',
|
||||
'Switching to new protocol; obey Upgrade header')
|
||||
PROCESSING = _HTTPStatus(102, 'Processing', '')
|
||||
OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows')
|
||||
CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows')
|
||||
ACCEPTED = _HTTPStatus(202, 'Accepted',
|
||||
'Request accepted, processing continues off-line')
|
||||
NON_AUTHORITATIVE_INFORMATION = _HTTPStatus(203,
|
||||
'Non-Authoritative Information', 'Request fulfilled from cache')
|
||||
NO_CONTENT = _HTTPStatus(204, 'No Content', 'Request fulfilled, nothing follows')
|
||||
RESET_CONTENT =_HTTPStatus(205, 'Reset Content', 'Clear input form for further input')
|
||||
PARTIAL_CONTENT = _HTTPStatus(206, 'Partial Content', 'Partial content follows')
|
||||
MULTI_STATUS = _HTTPStatus(207, 'Multi-Status', '')
|
||||
ALREADY_REPORTED = _HTTPStatus(208, 'Already Reported', '')
|
||||
IM_USED = _HTTPStatus(226, 'IM Used', '')
|
||||
MULTIPLE_CHOICES = _HTTPStatus(300, 'Multiple Choices',
|
||||
'Object has several resources -- see URI list')
|
||||
MOVED_PERMANENTLY = _HTTPStatus(301, 'Moved Permanently',
|
||||
'Object moved permanently -- see URI list')
|
||||
FOUND = _HTTPStatus(302, 'Found', 'Object moved temporarily -- see URI list')
|
||||
SEE_OTHER = _HTTPStatus(303, 'See Other', 'Object moved -- see Method and URL list')
|
||||
NOT_MODIFIED = _HTTPStatus(304, 'Not Modified',
|
||||
'Document has not changed since given time')
|
||||
USE_PROXY = _HTTPStatus(305, 'Use Proxy',
|
||||
'You must use proxy specified in Location to access this resource')
|
||||
TEMPORARY_REDIRECT = _HTTPStatus(307, 'Temporary Redirect',
|
||||
'Object moved temporarily -- see URI list')
|
||||
PERMANENT_REDIRECT = _HTTPStatus(308, 'Permanent Redirect',
|
||||
'Object moved permanently -- see URI list')
|
||||
BAD_REQUEST = _HTTPStatus(400, 'Bad Request',
|
||||
'Bad request syntax or unsupported method')
|
||||
UNAUTHORIZED = _HTTPStatus(401, 'Unauthorized',
|
||||
'No permission -- see authorization schemes')
|
||||
PAYMENT_REQUIRED = _HTTPStatus(402, 'Payment Required',
|
||||
'No payment -- see charging schemes')
|
||||
FORBIDDEN = _HTTPStatus(403, 'Forbidden',
|
||||
'Request forbidden -- authorization will not help')
|
||||
NOT_FOUND = _HTTPStatus(404, 'Not Found',
|
||||
'Nothing matches the given URI')
|
||||
METHOD_NOT_ALLOWED = _HTTPStatus(405, 'Method Not Allowed',
|
||||
'Specified method is invalid for this resource')
|
||||
NOT_ACCEPTABLE = _HTTPStatus(406, 'Not Acceptable',
|
||||
'URI not available in preferred format')
|
||||
PROXY_AUTHENTICATION_REQUIRED = _HTTPStatus(407,
|
||||
'Proxy Authentication Required',
|
||||
'You must authenticate with this proxy before proceeding')
|
||||
REQUEST_TIMEOUT = _HTTPStatus(408, 'Request Timeout',
|
||||
'Request timed out; try again later')
|
||||
CONFLICT = _HTTPStatus(409, 'Conflict', 'Request conflict')
|
||||
GONE = _HTTPStatus(410, 'Gone',
|
||||
'URI no longer exists and has been permanently removed')
|
||||
LENGTH_REQUIRED = _HTTPStatus(411, 'Length Required',
|
||||
'Client must specify Content-Length')
|
||||
PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed',
|
||||
'Precondition in headers is false')
|
||||
REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large',
|
||||
'Entity is too large')
|
||||
REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long',
|
||||
'URI is too long')
|
||||
UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type',
|
||||
'Entity body in unsupported format')
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416,
|
||||
'Requested Range Not Satisfiable',
|
||||
'Cannot satisfy request range')
|
||||
EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed',
|
||||
'Expect condition could not be satisfied')
|
||||
MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request',
|
||||
'Server is not able to produce a response')
|
||||
UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity')
|
||||
LOCKED = _HTTPStatus(423, 'Locked')
|
||||
FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency')
|
||||
UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required')
|
||||
PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required',
|
||||
'The origin server requires the request to be conditional')
|
||||
TOO_MANY_REQUESTS = _HTTPStatus(429, 'Too Many Requests',
|
||||
'The user has sent too many requests in '
|
||||
'a given amount of time ("rate limiting")')
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE = _HTTPStatus(431,
|
||||
'Request Header Fields Too Large',
|
||||
'The server is unwilling to process the request because its header '
|
||||
'fields are too large')
|
||||
UNAVAILABLE_FOR_LEGAL_REASONS = _HTTPStatus(451,
|
||||
'Unavailable For Legal Reasons',
|
||||
'The server is denying access to the '
|
||||
'resource as a consequence of a legal demand')
|
||||
INTERNAL_SERVER_ERROR = _HTTPStatus(500, 'Internal Server Error',
|
||||
'Server got itself in trouble')
|
||||
NOT_IMPLEMENTED = _HTTPStatus(501, 'Not Implemented',
|
||||
'Server does not support this operation')
|
||||
BAD_GATEWAY = _HTTPStatus(502, 'Bad Gateway',
|
||||
'Invalid responses from another server/proxy')
|
||||
SERVICE_UNAVAILABLE = _HTTPStatus(503, 'Service Unavailable',
|
||||
'The server cannot process the request due to a high load')
|
||||
GATEWAY_TIMEOUT = _HTTPStatus(504, 'Gateway Timeout',
|
||||
'The gateway server did not receive a timely response')
|
||||
HTTP_VERSION_NOT_SUPPORTED = _HTTPStatus(505, 'HTTP Version Not Supported',
|
||||
'Cannot fulfill request')
|
||||
VARIANT_ALSO_NEGOTIATES = _HTTPStatus(506, 'Variant Also Negotiates')
|
||||
INSUFFICIENT_STORAGE = _HTTPStatus(507, 'Insufficient Storage')
|
||||
LOOP_DETECTED = _HTTPStatus(508, 'Loop Detected')
|
||||
NOT_EXTENDED = _HTTPStatus(510, 'Not Extended')
|
||||
NETWORK_AUTHENTICATION_REQUIRED = _HTTPStatus(511,
|
||||
'Network Authentication Required',
|
||||
'The client needs to authenticate to gain network access')
|
||||
"""
|
||||
)
|
||||
return AstroidBuilder(astroid.MANAGER).string_build(code)
|
||||
|
||||
|
||||
def _http_client_transform():
|
||||
return AstroidBuilder(astroid.MANAGER).string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
from http import HTTPStatus
|
||||
|
||||
CONTINUE = HTTPStatus.CONTINUE
|
||||
SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
|
||||
PROCESSING = HTTPStatus.PROCESSING
|
||||
OK = HTTPStatus.OK
|
||||
CREATED = HTTPStatus.CREATED
|
||||
ACCEPTED = HTTPStatus.ACCEPTED
|
||||
NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
|
||||
NO_CONTENT = HTTPStatus.NO_CONTENT
|
||||
RESET_CONTENT = HTTPStatus.RESET_CONTENT
|
||||
PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
|
||||
MULTI_STATUS = HTTPStatus.MULTI_STATUS
|
||||
ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
|
||||
IM_USED = HTTPStatus.IM_USED
|
||||
MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
|
||||
MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
|
||||
FOUND = HTTPStatus.FOUND
|
||||
SEE_OTHER = HTTPStatus.SEE_OTHER
|
||||
NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
|
||||
USE_PROXY = HTTPStatus.USE_PROXY
|
||||
TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
|
||||
PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
|
||||
BAD_REQUEST = HTTPStatus.BAD_REQUEST
|
||||
UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
|
||||
PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
|
||||
FORBIDDEN = HTTPStatus.FORBIDDEN
|
||||
NOT_FOUND = HTTPStatus.NOT_FOUND
|
||||
METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
|
||||
NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
|
||||
PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
|
||||
REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
|
||||
CONFLICT = HTTPStatus.CONFLICT
|
||||
GONE = HTTPStatus.GONE
|
||||
LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
|
||||
PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
|
||||
REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
|
||||
REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
|
||||
UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
|
||||
REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
|
||||
EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
|
||||
UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
|
||||
LOCKED = HTTPStatus.LOCKED
|
||||
FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
|
||||
UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
|
||||
PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
|
||||
TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
|
||||
REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
|
||||
INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
|
||||
NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
|
||||
BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
|
||||
SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
|
||||
GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
|
||||
HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
|
||||
VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
|
||||
INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
|
||||
LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
|
||||
NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
|
||||
NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, "http", _http_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, "http.client", _http_client_transform)
|
||||
45
venv/lib/python3.8/site-packages/astroid/brain/brain_io.py
Normal file
@@ -0,0 +1,45 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

"""Astroid brain hints for some of the _io C objects."""

import astroid


BUFFERED = {"BufferedWriter", "BufferedReader"}
TextIOWrapper = "TextIOWrapper"
FileIO = "FileIO"
BufferedWriter = "BufferedWriter"


def _generic_io_transform(node, name, cls):
    """Transform the given name, by adding the given *class* as a member of the node."""

    io_module = astroid.MANAGER.ast_from_module_name("_io")
    attribute_object = io_module[cls]
    instance = attribute_object.instantiate_class()
    node.locals[name] = [instance]


def _transform_text_io_wrapper(node):
    # This is not always correct, since it can vary with the type of the descriptor,
    # being stdout, stderr or stdin. But we cannot get access to the name of the
    # stream, which is why we are using the BufferedWriter class as a default
    # value
    return _generic_io_transform(node, name="buffer", cls=BufferedWriter)


def _transform_buffered(node):
    return _generic_io_transform(node, name="raw", cls=FileIO)


astroid.MANAGER.register_transform(
    astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
)
astroid.MANAGER.register_transform(
    astroid.ClassDef,
    _transform_text_io_wrapper,
    lambda node: node.name == TextIOWrapper,
)
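if __name__ == "__main__":
    # Illustrative sketch only -- not part of upstream astroid. It shows the
    # effect of the transforms above: the raw-built ``_io.BufferedReader``
    # class gains a ``raw`` member (a FileIO instance), which is what lets
    # pylint understand attribute chains such as ``sys.stdout.buffer``.
    import astroid

    buffered_reader = astroid.extract_node(
        """
        import _io
        _io.BufferedReader  #@
        """
    )
    cls = next(buffered_reader.infer())
    print("raw" in cls.locals)  # expected: True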
@@ -0,0 +1,29 @@
# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder


def mechanize_transform():
    return AstroidBuilder(MANAGER).string_build(
        """

class Browser(object):
    def open(self, url, data=None, timeout=None):
        return None
    def open_novisit(self, url, data=None, timeout=None):
        return None
    def open_local_file(self, filename):
        return None

"""
    )


register_module_extender(MANAGER, "mechanize", mechanize_transform)
@@ -0,0 +1,107 @@
|
||||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import exceptions
|
||||
|
||||
|
||||
def _multiprocessing_transform():
|
||||
module = astroid.parse(
|
||||
"""
|
||||
from multiprocessing.managers import SyncManager
|
||||
def Manager():
|
||||
return SyncManager()
|
||||
"""
|
||||
)
|
||||
# Multiprocessing uses a getattr lookup inside contexts,
|
||||
# in order to get the attributes they need. Since it's extremely
|
||||
# dynamic, we use this approach to fake it.
|
||||
node = astroid.parse(
|
||||
"""
|
||||
from multiprocessing.context import DefaultContext, BaseContext
|
||||
default = DefaultContext()
|
||||
base = BaseContext()
|
||||
"""
|
||||
)
|
||||
try:
|
||||
context = next(node["default"].infer())
|
||||
base = next(node["base"].infer())
|
||||
except exceptions.InferenceError:
|
||||
return module
|
||||
|
||||
for node in (context, base):
|
||||
for key, value in node.locals.items():
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
|
||||
value = value[0]
|
||||
if isinstance(value, astroid.FunctionDef):
|
||||
# We need to rebind this, since otherwise
# it will have an extra argument (self).
|
||||
value = astroid.BoundMethod(value, node)
|
||||
module[key] = value
|
||||
return module
|
||||
|
||||
|
||||
def _multiprocessing_managers_transform():
|
||||
return astroid.parse(
|
||||
"""
|
||||
import array
|
||||
import threading
|
||||
import multiprocessing.pool as pool
|
||||
|
||||
import six
|
||||
|
||||
class Namespace(object):
|
||||
pass
|
||||
|
||||
class Value(object):
|
||||
def __init__(self, typecode, value, lock=True):
|
||||
self._typecode = typecode
|
||||
self._value = value
|
||||
def get(self):
|
||||
return self._value
|
||||
def set(self, value):
|
||||
self._value = value
|
||||
def __repr__(self):
|
||||
return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
|
||||
value = property(get, set)
|
||||
|
||||
def Array(typecode, sequence, lock=True):
|
||||
return array.array(typecode, sequence)
|
||||
|
||||
class SyncManager(object):
|
||||
Queue = JoinableQueue = six.moves.queue.Queue
|
||||
Event = threading.Event
|
||||
RLock = threading.RLock
|
||||
BoundedSemaphore = threading.BoundedSemaphore
|
||||
Condition = threading.Condition
|
||||
Barrier = threading.Barrier
|
||||
Pool = pool.Pool
|
||||
list = list
|
||||
dict = dict
|
||||
Value = Value
|
||||
Array = Array
|
||||
Namespace = Namespace
|
||||
__enter__ = lambda self: self
|
||||
__exit__ = lambda *args: args
|
||||
|
||||
def start(self, initializer=None, initargs=None):
|
||||
pass
|
||||
def shutdown(self):
|
||||
pass
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
|
||||
)
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "multiprocessing", _multiprocessing_transform
|
||||
)
|
||||
@@ -0,0 +1,453 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Mateusz Bysiek <mb@mbdev.pl>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python standard library."""
|
||||
|
||||
import functools
|
||||
import keyword
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
|
||||
from astroid import arguments
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder, extract_node
|
||||
from astroid import util
|
||||
|
||||
|
||||
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
|
||||
ENUM_BASE_NAMES = {
|
||||
"Enum",
|
||||
"IntEnum",
|
||||
"enum.Enum",
|
||||
"enum.IntEnum",
|
||||
"IntFlag",
|
||||
"enum.IntFlag",
|
||||
}
|
||||
|
||||
|
||||
def _infer_first(node, context):
|
||||
if node is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
value = next(node.infer(context=context))
|
||||
if value is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
else:
|
||||
return value
|
||||
except StopIteration:
|
||||
raise InferenceError()
|
||||
|
||||
|
||||
def _find_func_form_arguments(node, context):
|
||||
def _extract_namedtuple_arg_or_keyword(position, key_name=None):
|
||||
|
||||
if len(args) > position:
|
||||
return _infer_first(args[position], context)
|
||||
if key_name and key_name in found_keywords:
|
||||
return _infer_first(found_keywords[key_name], context)
|
||||
|
||||
args = node.args
|
||||
keywords = node.keywords
|
||||
found_keywords = (
|
||||
{keyword.arg: keyword.value for keyword in keywords} if keywords else {}
|
||||
)
|
||||
|
||||
name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
|
||||
names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
|
||||
if name and names:
|
||||
return name.value, names
|
||||
|
||||
raise UseInferenceDefault()
|
||||
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
|
||||
"""Specific inference function for namedtuple or Python 3 enum. """
|
||||
# node is a Call node, class name as first argument and generated class
|
||||
# attributes as second argument
|
||||
|
||||
# namedtuple or enums list of attributes can be a list of strings or a
|
||||
# whitespace-separate string
|
||||
try:
|
||||
name, names = _find_func_form_arguments(node, context)
|
||||
try:
|
||||
attributes = names.value.replace(",", " ").split()
|
||||
except AttributeError:
|
||||
if not enum:
|
||||
attributes = [
|
||||
_infer_first(const, context).value for const in names.elts
|
||||
]
|
||||
else:
|
||||
# Enums supports either iterator of (name, value) pairs
|
||||
# or mappings.
|
||||
if hasattr(names, "items") and isinstance(names.items, list):
|
||||
attributes = [
|
||||
_infer_first(const[0], context).value
|
||||
for const in names.items
|
||||
if isinstance(const[0], nodes.Const)
|
||||
]
|
||||
elif hasattr(names, "elts"):
|
||||
# Enums can support either ["a", "b", "c"]
|
||||
# or [("a", 1), ("b", 2), ...], but they can't
|
||||
# be mixed.
|
||||
if all(isinstance(const, nodes.Tuple) for const in names.elts):
|
||||
attributes = [
|
||||
_infer_first(const.elts[0], context).value
|
||||
for const in names.elts
|
||||
if isinstance(const, nodes.Tuple)
|
||||
]
|
||||
else:
|
||||
attributes = [
|
||||
_infer_first(const, context).value for const in names.elts
|
||||
]
|
||||
else:
|
||||
raise AttributeError
|
||||
if not attributes:
|
||||
raise AttributeError
|
||||
except (AttributeError, exceptions.InferenceError):
|
||||
raise UseInferenceDefault()
|
||||
|
||||
# If we can't infer the name of the class, don't crash, up to this point
|
||||
# we know it is a namedtuple anyway.
|
||||
name = name or "Uninferable"
|
||||
# we want to return a Class node instance with proper attributes set
|
||||
class_node = nodes.ClassDef(name, "docstring")
|
||||
class_node.parent = node.parent
|
||||
# set base class=tuple
|
||||
class_node.bases.append(base_type)
|
||||
# XXX add __init__(*attributes) method
|
||||
for attr in attributes:
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
fake_node.attrname = attr
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return class_node, name, attributes
|
||||
|
||||
|
||||
def _has_namedtuple_base(node):
|
||||
"""Predicate for class inference tip
|
||||
|
||||
:type node: ClassDef
|
||||
:rtype: bool
|
||||
"""
|
||||
return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
|
||||
|
||||
|
||||
def _looks_like(node, name):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return func.attrname == name
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == name
|
||||
return False
|
||||
|
||||
|
||||
_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
|
||||
_looks_like_enum = functools.partial(_looks_like, name="Enum")
|
||||
_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
|
||||
|
||||
|
||||
def infer_named_tuple(node, context=None):
|
||||
"""Specific inference function for namedtuple Call node"""
|
||||
tuple_base_name = nodes.Name(name="tuple", parent=node.root())
|
||||
class_node, name, attributes = infer_func_form(
|
||||
node, tuple_base_name, context=context
|
||||
)
|
||||
call_site = arguments.CallSite.from_call(node, context=context)
|
||||
func = next(extract_node("import collections; collections.namedtuple").infer())
|
||||
try:
|
||||
rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
|
||||
except InferenceError:
|
||||
rename = False
|
||||
|
||||
if rename:
|
||||
attributes = _get_renamed_namedtuple_attributes(attributes)
|
||||
|
||||
replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
|
||||
field_def = (
|
||||
" {name} = property(lambda self: self[{index:d}], "
|
||||
"doc='Alias for field number {index:d}')"
|
||||
)
|
||||
field_defs = "\n".join(
|
||||
field_def.format(name=name, index=index)
|
||||
for index, name in enumerate(attributes)
|
||||
)
|
||||
fake = AstroidBuilder(MANAGER).string_build(
|
||||
"""
|
||||
class %(name)s(tuple):
|
||||
__slots__ = ()
|
||||
_fields = %(fields)r
|
||||
def _asdict(self):
|
||||
return self.__dict__
|
||||
@classmethod
|
||||
def _make(cls, iterable, new=tuple.__new__, len=len):
|
||||
return new(cls, iterable)
|
||||
def _replace(self, %(replace_args)s):
|
||||
return self
|
||||
def __getnewargs__(self):
|
||||
return tuple(self)
|
||||
%(field_defs)s
|
||||
"""
|
||||
% {
|
||||
"name": name,
|
||||
"fields": attributes,
|
||||
"field_defs": field_defs,
|
||||
"replace_args": replace_args,
|
||||
}
|
||||
)
|
||||
class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
|
||||
class_node.locals["_make"] = fake.body[0].locals["_make"]
|
||||
class_node.locals["_replace"] = fake.body[0].locals["_replace"]
|
||||
class_node.locals["_fields"] = fake.body[0].locals["_fields"]
|
||||
for attr in attributes:
|
||||
class_node.locals[attr] = fake.body[0].locals[attr]
|
||||
# we use UseInferenceDefault, we can't be a generator so return an iterator
|
||||
return iter([class_node])
|
||||
|
||||
|
||||
def _get_renamed_namedtuple_attributes(field_names):
|
||||
names = list(field_names)
|
||||
seen = set()
|
||||
for i, name in enumerate(field_names):
|
||||
if (
|
||||
not all(c.isalnum() or c == "_" for c in name)
|
||||
or keyword.iskeyword(name)
|
||||
or not name
|
||||
or name[0].isdigit()
|
||||
or name.startswith("_")
|
||||
or name in seen
|
||||
):
|
||||
names[i] = "_%d" % i
|
||||
seen.add(name)
|
||||
return tuple(names)
|
||||
|
||||
|
||||
def infer_enum(node, context=None):
|
||||
""" Specific inference function for enum Call node. """
|
||||
enum_meta = extract_node(
|
||||
"""
|
||||
class EnumMeta(object):
|
||||
'docstring'
|
||||
def __call__(self, node):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return EnumAttribute()
|
||||
def __iter__(self):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return [EnumAttribute()]
|
||||
def __reversed__(self):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return (EnumAttribute, )
|
||||
def __next__(self):
|
||||
return next(iter(self))
|
||||
def __getitem__(self, attr):
|
||||
class Value(object):
|
||||
@property
|
||||
def name(self):
|
||||
return ''
|
||||
@property
|
||||
def value(self):
|
||||
return attr
|
||||
|
||||
return Value()
|
||||
__members__ = ['']
|
||||
"""
|
||||
)
|
||||
class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
|
||||
return iter([class_node.instantiate_class()])
|
||||
|
||||
|
||||
INT_FLAG_ADDITION_METHODS = """
|
||||
def __or__(self, other):
|
||||
return {name}(self.value | other.value)
|
||||
def __and__(self, other):
|
||||
return {name}(self.value & other.value)
|
||||
def __xor__(self, other):
|
||||
return {name}(self.value ^ other.value)
|
||||
def __add__(self, other):
|
||||
return {name}(self.value + other.value)
|
||||
def __div__(self, other):
|
||||
return {name}(self.value / other.value)
|
||||
def __invert__(self):
|
||||
return {name}(~self.value)
|
||||
def __mul__(self, other):
|
||||
return {name}(self.value * other.value)
|
||||
"""
|
||||
|
||||
|
||||
def infer_enum_class(node):
|
||||
""" Specific inference for enums. """
|
||||
for basename in node.basenames:
|
||||
# TODO: doesn't handle subclasses yet. This implementation
|
||||
# is a hack to support enums.
|
||||
if basename not in ENUM_BASE_NAMES:
|
||||
continue
|
||||
if node.root().name == "enum":
|
||||
# Skip if the class is directly from enum module.
|
||||
break
|
||||
for local, values in node.locals.items():
|
||||
if any(not isinstance(value, nodes.AssignName) for value in values):
|
||||
continue
|
||||
|
||||
targets = []
|
||||
stmt = values[0].statement()
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
if isinstance(stmt.targets[0], nodes.Tuple):
|
||||
targets = stmt.targets[0].itered()
|
||||
else:
|
||||
targets = stmt.targets
|
||||
elif isinstance(stmt, nodes.AnnAssign):
|
||||
targets = [stmt.target]
|
||||
else:
|
||||
continue
|
||||
|
||||
inferred_return_value = None
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
if isinstance(stmt.value, nodes.Const):
|
||||
if isinstance(stmt.value.value, str):
|
||||
inferred_return_value = repr(stmt.value.value)
|
||||
else:
|
||||
inferred_return_value = stmt.value.value
|
||||
else:
|
||||
inferred_return_value = stmt.value.as_string()
|
||||
|
||||
new_targets = []
|
||||
for target in targets:
|
||||
# Replace all the assignments with our mocked class.
|
||||
classdef = dedent(
|
||||
"""
|
||||
class {name}({types}):
|
||||
@property
|
||||
def value(self):
|
||||
return {return_value}
|
||||
@property
|
||||
def name(self):
|
||||
return "{name}"
|
||||
""".format(
|
||||
name=target.name,
|
||||
types=", ".join(node.basenames),
|
||||
return_value=inferred_return_value,
|
||||
)
|
||||
)
|
||||
if "IntFlag" in basename:
|
||||
# Alright, we need to add some additional methods.
|
||||
# Unfortunately we still can't infer the resulting objects as
|
||||
# Enum members, but once we'll be able to do that, the following
|
||||
# should result in some nice symbolic execution
|
||||
classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)
|
||||
|
||||
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
|
||||
fake.parent = target.parent
|
||||
for method in node.mymethods():
|
||||
fake.locals[method.name] = [method]
|
||||
new_targets.append(fake.instantiate_class())
|
||||
node.locals[local] = new_targets
|
||||
break
|
||||
return node
|
||||
|
||||
|
||||
def infer_typing_namedtuple_class(class_node, context=None):
|
||||
"""Infer a subclass of typing.NamedTuple"""
|
||||
# Check if it has the corresponding bases
|
||||
annassigns_fields = [
|
||||
annassign.target.name
|
||||
for annassign in class_node.body
|
||||
if isinstance(annassign, nodes.AnnAssign)
|
||||
]
|
||||
code = dedent(
|
||||
"""
|
||||
from collections import namedtuple
|
||||
namedtuple({typename!r}, {fields!r})
|
||||
"""
|
||||
).format(typename=class_node.name, fields=",".join(annassigns_fields))
|
||||
node = extract_node(code)
|
||||
generated_class_node = next(infer_named_tuple(node, context))
|
||||
for method in class_node.mymethods():
|
||||
generated_class_node.locals[method.name] = [method]
|
||||
|
||||
for assign in class_node.body:
|
||||
if not isinstance(assign, nodes.Assign):
|
||||
continue
|
||||
|
||||
for target in assign.targets:
|
||||
attr = target.name
|
||||
generated_class_node.locals[attr] = class_node.locals[attr]
|
||||
|
||||
return iter((generated_class_node,))
|
||||
|
||||
|
||||
def infer_typing_namedtuple(node, context=None):
|
||||
"""Infer a typing.NamedTuple(...) call."""
|
||||
# This is essentially a namedtuple with different arguments
|
||||
# so we extract the args and infer a named tuple.
|
||||
try:
|
||||
func = next(node.func.infer())
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if func.qname() != "typing.NamedTuple":
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(node.args) != 2:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
names = []
|
||||
for elt in node.args[1].elts:
|
||||
if not isinstance(elt, (nodes.List, nodes.Tuple)):
|
||||
raise UseInferenceDefault
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault
|
||||
names.append(elt.elts[0].as_string())
|
||||
|
||||
typename = node.args[0].as_string()
|
||||
if names:
|
||||
field_names = "({},)".format(",".join(names))
|
||||
else:
|
||||
field_names = "''"
|
||||
node = extract_node(
|
||||
"namedtuple({typename}, {fields})".format(typename=typename, fields=field_names)
|
||||
)
|
||||
return infer_named_tuple(node, context)
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
|
||||
)
|
||||
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
infer_enum_class,
|
||||
predicate=lambda cls: any(
|
||||
basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
|
||||
),
|
||||
)
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
|
||||
)
|
||||
MANAGER.register_transform(
|
||||
nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
|
||||
)
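if __name__ == "__main__":
    # Illustrative sketch only -- not part of upstream astroid. It shows the
    # namedtuple inference tip registered above: the Call node infers to a
    # synthesized class that carries ``_fields`` plus one property per field.
    import astroid

    call = astroid.extract_node(
        """
        import collections
        collections.namedtuple("Point", ["x", "y"])  #@
        """
    )
    inferred = next(call.infer())
    print(inferred.name, "x" in inferred.instance_attrs)  # expected: Point True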
77
venv/lib/python3.8/site-packages/astroid/brain/brain_nose.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
import astroid.builder
|
||||
|
||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
|
||||
|
||||
|
||||
def _pep8(name, caps=re.compile("([A-Z])")):
|
||||
return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
|
||||
"""Get an iterator of names and bound methods."""
|
||||
module = _BUILDER.string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import unittest
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
pass
|
||||
a = Test()
|
||||
"""
|
||||
)
|
||||
)
|
||||
try:
|
||||
case = next(module["a"].infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
for method in case.methods():
|
||||
if method.name.startswith("assert") and "_" not in method.name:
|
||||
pep8_name = _pep8(method.name)
|
||||
yield pep8_name, astroid.BoundMethod(method, case)
|
||||
if method.name == "assertEqual":
|
||||
# nose also exports assert_equals.
|
||||
yield "assert_equals", astroid.BoundMethod(method, case)
|
||||
|
||||
|
||||
def _nose_tools_transform(node):
|
||||
for method_name, method in _nose_tools_functions():
|
||||
node.locals[method_name] = [method]
|
||||
|
||||
|
||||
def _nose_tools_trivial_transform():
|
||||
"""Custom transform for the nose.tools module."""
|
||||
stub = _BUILDER.string_build("""__all__ = []""")
|
||||
all_entries = ["ok_", "eq_"]
|
||||
|
||||
for pep8_name, method in _nose_tools_functions():
|
||||
all_entries.append(pep8_name)
|
||||
stub[pep8_name] = method
|
||||
|
||||
# Update the __all__ variable, since nose.tools
|
||||
# does this manually with .append.
|
||||
all_assign = stub["__all__"].parent
|
||||
all_object = astroid.List(all_entries)
|
||||
all_object.parent = all_assign
|
||||
all_assign.value = all_object
|
||||
return stub
|
||||
|
||||
|
||||
astroid.register_module_extender(
|
||||
astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
|
||||
)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Astroid hooks for numpy.core.fromnumeric module."""

import astroid


def numpy_core_fromnumeric_transform():
    return astroid.parse(
        """
    def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None):
        return numpy.ndarray([0, 0])
    """
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.fromnumeric", numpy_core_fromnumeric_transform
)
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.function_base module."""
|
||||
|
||||
import functools
|
||||
import astroid
|
||||
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
|
||||
|
||||
|
||||
METHODS_TO_BE_INFERRED = {
|
||||
"linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"logspace": """def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
"geomspace": """def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
|
||||
return numpy.ndarray([0, 0])""",
|
||||
}
|
||||
|
||||
for func_name, func_src in METHODS_TO_BE_INFERRED.items():
|
||||
inference_function = functools.partial(infer_numpy_member, func_src)
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.Attribute,
|
||||
astroid.inference_tip(inference_function),
|
||||
functools.partial(looks_like_numpy_member, func_name),
|
||||
)
|
||||
@@ -0,0 +1,92 @@
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Astroid hooks for numpy.core.multiarray module."""

import functools
import astroid
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member


def numpy_core_multiarray_transform():
    return astroid.parse(
        """
    # different functions defined in multiarray.py
    def inner(a, b):
        return numpy.ndarray([0, 0])

    def vdot(a, b):
        return numpy.ndarray([0, 0])
        """
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.multiarray", numpy_core_multiarray_transform
)


METHODS_TO_BE_INFERRED = {
    "array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0):
            return numpy.ndarray([0, 0])""",
    "dot": """def dot(a, b, out=None):
            return numpy.ndarray([0, 0])""",
    "empty_like": """def empty_like(a, dtype=None, order='K', subok=True):
            return numpy.ndarray((0, 0))""",
    "concatenate": """def concatenate(arrays, axis=None, out=None):
            return numpy.ndarray((0, 0))""",
    "where": """def where(condition, x=None, y=None):
            return numpy.ndarray([0, 0])""",
    "empty": """def empty(shape, dtype=float, order='C'):
            return numpy.ndarray([0, 0])""",
    "bincount": """def bincount(x, weights=None, minlength=0):
            return numpy.ndarray([0, 0])""",
    "busday_count": """def busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "busday_offset": """def busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "can_cast": """def can_cast(from_, to, casting='safe'):
            return True""",
    "copyto": """def copyto(dst, src, casting='same_kind', where=True):
            return None""",
    "datetime_as_string": """def datetime_as_string(arr, unit=None, timezone='naive', casting='same_kind'):
            return numpy.ndarray([0, 0])""",
    "is_busday": """def is_busday(dates, weekmask='1111100', holidays=None, busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "lexsort": """def lexsort(keys, axis=-1):
            return numpy.ndarray([0, 0])""",
    "may_share_memory": """def may_share_memory(a, b, max_work=None):
            return True""",
    # Not yet available because dtype is not yet present in those brains
    # "min_scalar_type": """def min_scalar_type(a):
    #     return numpy.dtype('int16')""",
    "packbits": """def packbits(a, axis=None, bitorder='big'):
            return numpy.ndarray([0, 0])""",
    # Not yet available because dtype is not yet present in those brains
    # "result_type": """def result_type(*arrays_and_dtypes):
    #     return numpy.dtype('int16')""",
    "shares_memory": """def shares_memory(a, b, max_work=None):
            return True""",
    "unpackbits": """def unpackbits(a, axis=None, count=None, bitorder='big'):
            return numpy.ndarray([0, 0])""",
    "unravel_index": """def unravel_index(indices, shape, order='C'):
            return (numpy.ndarray([0, 0]),)""",
    "zeros": """def zeros(shape, dtype=float, order='C'):
            return numpy.ndarray([0, 0])""",
}

for method_name, function_src in METHODS_TO_BE_INFERRED.items():
    inference_function = functools.partial(infer_numpy_member, function_src)
    astroid.MANAGER.register_transform(
        astroid.Attribute,
        astroid.inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, method_name),
    )
    astroid.MANAGER.register_transform(
        astroid.Name,
        astroid.inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, method_name),
    )
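For illustration (not part of the commit): unlike most of the other numpy brains, the loop above registers each tip on both astroid.Attribute and astroid.Name nodes. A minimal sketch, assuming the brains are loaded, of the inferred results for two of the stubs:

# Illustrative sketch only: numpy.array(...) infers to an ndarray instance,
# numpy.can_cast(...) to the constant True, straight from the stub bodies.
import astroid

arr_call, cast_call = astroid.extract_node(
    """
    import numpy
    numpy.array([1, 2, 3])  #@
    numpy.can_cast(int, float)  #@
    """
)
print(next(arr_call.infer()).pytype())  # expected to end in '.ndarray'
print(next(cast_call.infer()).value)    # expected: True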
@@ -0,0 +1,43 @@
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Astroid hooks for numpy.core.numeric module."""

import functools
import astroid
from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member


def numpy_core_numeric_transform():
    return astroid.parse(
        """
    # different functions defined in numeric.py
    import numpy
    def zeros_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
    def ones_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
    def full_like(a, fill_value, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
        """
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.numeric", numpy_core_numeric_transform
)


METHODS_TO_BE_INFERRED = {
    "ones": """def ones(shape, dtype=None, order='C'):
            return numpy.ndarray([0, 0])"""
}


for method_name, function_src in METHODS_TO_BE_INFERRED.items():
    inference_function = functools.partial(infer_numpy_member, function_src)
    astroid.MANAGER.register_transform(
        astroid.Attribute,
        astroid.inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, method_name),
    )
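For illustration (not part of the commit), the module extender above can be exercised directly, without numpy installed; a minimal sketch of what it contributes to numpy.core.numeric:

# Illustrative sketch only: the transform returns a parsed stub module whose
# locals are later merged into the real numpy.core.numeric module.
mod = numpy_core_numeric_transform()
print(sorted(mod.locals))  # expected: ['full_like', 'numpy', 'ones_like', 'zeros_like']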
@@ -0,0 +1,254 @@
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

# TODO(hippo91) : correct the methods signature.

"""Astroid hooks for numpy.core.numerictypes module."""

import astroid


def numpy_core_numerictypes_transform():
    # TODO: Uniformize the generic API with the ndarray one.
    # According to the numpy doc, the generic object should expose
    # the same API as ndarray. This has been done here partially
    # through the astype method.
    return astroid.parse(
        """
    # different types defined in numerictypes.py
    class generic(object):
        def __init__(self, value):
            self.T = None
            self.base = None
            self.data = None
            self.dtype = None
            self.flags = None
            self.flat = None
            self.imag = None
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = None
            self.size = None
            self.strides = None

        def all(self): return uninferable
        def any(self): return uninferable
        def argmax(self): return uninferable
        def argmin(self): return uninferable
        def argsort(self): return uninferable
        def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
        def base(self): return uninferable
        def byteswap(self): return uninferable
        def choose(self): return uninferable
        def clip(self): return uninferable
        def compress(self): return uninferable
        def conj(self): return uninferable
        def conjugate(self): return uninferable
        def copy(self): return uninferable
        def cumprod(self): return uninferable
        def cumsum(self): return uninferable
        def data(self): return uninferable
        def diagonal(self): return uninferable
        def dtype(self): return uninferable
        def dump(self): return uninferable
        def dumps(self): return uninferable
        def fill(self): return uninferable
        def flags(self): return uninferable
        def flat(self): return uninferable
        def flatten(self): return uninferable
        def getfield(self): return uninferable
        def imag(self): return uninferable
        def item(self): return uninferable
        def itemset(self): return uninferable
        def itemsize(self): return uninferable
        def max(self): return uninferable
        def mean(self): return uninferable
        def min(self): return uninferable
        def nbytes(self): return uninferable
        def ndim(self): return uninferable
        def newbyteorder(self): return uninferable
        def nonzero(self): return uninferable
        def prod(self): return uninferable
        def ptp(self): return uninferable
        def put(self): return uninferable
        def ravel(self): return uninferable
        def real(self): return uninferable
        def repeat(self): return uninferable
        def reshape(self): return uninferable
        def resize(self): return uninferable
        def round(self): return uninferable
        def searchsorted(self): return uninferable
        def setfield(self): return uninferable
        def setflags(self): return uninferable
        def shape(self): return uninferable
        def size(self): return uninferable
        def sort(self): return uninferable
        def squeeze(self): return uninferable
        def std(self): return uninferable
        def strides(self): return uninferable
        def sum(self): return uninferable
        def swapaxes(self): return uninferable
        def take(self): return uninferable
        def tobytes(self): return uninferable
        def tofile(self): return uninferable
        def tolist(self): return uninferable
        def tostring(self): return uninferable
        def trace(self): return uninferable
        def transpose(self): return uninferable
        def var(self): return uninferable
        def view(self): return uninferable


    class dtype(object):
        def __init__(self, obj, align=False, copy=False):
            self.alignment = None
            self.base = None
            self.byteorder = None
            self.char = None
            self.descr = None
            self.fields = None
            self.flags = None
            self.hasobject = None
            self.isalignedstruct = None
            self.isbuiltin = None
            self.isnative = None
            self.itemsize = None
            self.kind = None
            self.metadata = None
            self.name = None
            self.names = None
            self.num = None
            self.shape = None
            self.str = None
            self.subdtype = None
            self.type = None

        def newbyteorder(self, new_order='S'): return uninferable
        def __neg__(self): return uninferable

    class busdaycalendar(object):
        def __init__(self, weekmask='1111100', holidays=None):
            self.holidays = None
            self.weekmask = None

    class flexible(generic): pass
    class bool_(generic): pass
    class number(generic):
        def __neg__(self): return uninferable
    class datetime64(generic):
        def __init__(self, nb, unit=None): pass


    class void(flexible):
        def __init__(self, *args, **kwargs):
            self.base = None
            self.dtype = None
            self.flags = None
        def getfield(self): return uninferable
        def setfield(self): return uninferable


    class character(flexible): pass


    class integer(number):
        def __init__(self, value):
            self.denominator = None
            self.numerator = None


    class inexact(number): pass


    class str_(str, character):
        def maketrans(self, x, y=None, z=None): return uninferable


    class bytes_(bytes, character):
        def fromhex(self, string): return uninferable
        def maketrans(self, frm, to): return uninferable


    class signedinteger(integer): pass


    class unsignedinteger(integer): pass


    class complexfloating(inexact): pass


    class floating(inexact): pass


    class float64(floating, float):
        def fromhex(self, string): return uninferable


    class uint64(unsignedinteger): pass
    class complex64(complexfloating): pass
    class int16(signedinteger): pass
    class float96(floating): pass
    class int8(signedinteger): pass
    class uint32(unsignedinteger): pass
    class uint8(unsignedinteger): pass
    class _typedict(dict): pass
    class complex192(complexfloating): pass
    class timedelta64(signedinteger):
        def __init__(self, nb, unit=None): pass
    class int32(signedinteger): pass
    class uint16(unsignedinteger): pass
    class float32(floating): pass
    class complex128(complexfloating, complex): pass
    class float16(floating): pass
    class int64(signedinteger): pass

    buffer_type = memoryview
    bool8 = bool_
    byte = int8
    bytes0 = bytes_
    cdouble = complex128
    cfloat = complex128
    clongdouble = complex192
    clongfloat = complex192
    complex_ = complex128
    csingle = complex64
    double = float64
    float_ = float64
    half = float16
    int0 = int32
    int_ = int32
    intc = int32
    intp = int32
    long = int32
    longcomplex = complex192
    longdouble = float96
    longfloat = float96
    longlong = int64
    object0 = object_
    object_ = object_
    short = int16
    single = float32
    singlecomplex = complex64
    str0 = str_
    string_ = bytes_
    ubyte = uint8
    uint = uint32
    uint0 = uint32
    uintc = uint32
    uintp = uint32
    ulonglong = uint64
    unicode = str_
    unicode_ = str_
    ushort = uint16
    void0 = void
        """
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
)
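For illustration (not part of the commit), the parsed stub module can be inspected directly; a minimal sketch showing that the scalar-type hierarchy above is visible to inference:

# Illustrative sketch only: the stub classes carry the expected bases.
mod = numpy_core_numerictypes_transform()
uint64 = mod.locals["uint64"][0]
print(uint64.basenames)                          # expected: ['unsignedinteger']
print([cls.name for cls in uint64.ancestors()])  # expected to include 'integer', 'number', 'generic'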
@@ -0,0 +1,147 @@
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Astroid hooks for numpy.core.umath module."""

import astroid


def numpy_core_umath_transform():
    ufunc_optional_keyword_arguments = (
        """out=None, where=True, casting='same_kind', order='K', """
        """dtype=None, subok=True"""
    )
    return astroid.parse(
        """
    class FakeUfunc:
        def __init__(self):
            self.__doc__ = str()
            self.__name__ = str()
            self.nin = 0
            self.nout = 0
            self.nargs = 0
            self.ntypes = 0
            self.types = None
            self.identity = None
            self.signature = None

        @classmethod
        def reduce(cls, a, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def accumulate(cls, array, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def reduceat(cls, a, indices, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def outer(cls, A, B, **kwargs):
            return numpy.ndarray([0, 0])

        @classmethod
        def at(cls, a, indices, b=None):
            return numpy.ndarray([0, 0])

    class FakeUfuncOneArg(FakeUfunc):
        def __call__(self, x, {opt_args:s}):
            return numpy.ndarray([0, 0])

    class FakeUfuncOneArgBis(FakeUfunc):
        def __call__(self, x, {opt_args:s}):
            return numpy.ndarray([0, 0]), numpy.ndarray([0, 0])

    class FakeUfuncTwoArgs(FakeUfunc):
        def __call__(self, x1, x2, {opt_args:s}):
            return numpy.ndarray([0, 0])

    # Constants
    e = 2.718281828459045
    euler_gamma = 0.5772156649015329

    # One arg functions with optional kwargs
    arccos = FakeUfuncOneArg()
    arccosh = FakeUfuncOneArg()
    arcsin = FakeUfuncOneArg()
    arcsinh = FakeUfuncOneArg()
    arctan = FakeUfuncOneArg()
    arctanh = FakeUfuncOneArg()
    cbrt = FakeUfuncOneArg()
    conj = FakeUfuncOneArg()
    conjugate = FakeUfuncOneArg()
    cosh = FakeUfuncOneArg()
    deg2rad = FakeUfuncOneArg()
    exp2 = FakeUfuncOneArg()
    expm1 = FakeUfuncOneArg()
    fabs = FakeUfuncOneArg()
    frexp = FakeUfuncOneArgBis()
    isfinite = FakeUfuncOneArg()
    isinf = FakeUfuncOneArg()
    log = FakeUfuncOneArg()
    log1p = FakeUfuncOneArg()
    log2 = FakeUfuncOneArg()
    logical_not = FakeUfuncOneArg()
    modf = FakeUfuncOneArgBis()
    negative = FakeUfuncOneArg()
    positive = FakeUfuncOneArg()
    rad2deg = FakeUfuncOneArg()
    reciprocal = FakeUfuncOneArg()
    rint = FakeUfuncOneArg()
    sign = FakeUfuncOneArg()
    signbit = FakeUfuncOneArg()
    sinh = FakeUfuncOneArg()
    spacing = FakeUfuncOneArg()
    square = FakeUfuncOneArg()
    tan = FakeUfuncOneArg()
    tanh = FakeUfuncOneArg()
    trunc = FakeUfuncOneArg()

    # Two args functions with optional kwargs
    bitwise_and = FakeUfuncTwoArgs()
    bitwise_or = FakeUfuncTwoArgs()
    bitwise_xor = FakeUfuncTwoArgs()
    copysign = FakeUfuncTwoArgs()
    divide = FakeUfuncTwoArgs()
    divmod = FakeUfuncTwoArgs()
    equal = FakeUfuncTwoArgs()
    float_power = FakeUfuncTwoArgs()
    floor_divide = FakeUfuncTwoArgs()
    fmax = FakeUfuncTwoArgs()
    fmin = FakeUfuncTwoArgs()
    fmod = FakeUfuncTwoArgs()
    greater = FakeUfuncTwoArgs()
    gcd = FakeUfuncTwoArgs()
    hypot = FakeUfuncTwoArgs()
    heaviside = FakeUfuncTwoArgs()
    lcm = FakeUfuncTwoArgs()
    ldexp = FakeUfuncTwoArgs()
    left_shift = FakeUfuncTwoArgs()
    less = FakeUfuncTwoArgs()
    logaddexp = FakeUfuncTwoArgs()
    logaddexp2 = FakeUfuncTwoArgs()
    logical_and = FakeUfuncTwoArgs()
    logical_or = FakeUfuncTwoArgs()
    logical_xor = FakeUfuncTwoArgs()
    maximum = FakeUfuncTwoArgs()
    minimum = FakeUfuncTwoArgs()
    nextafter = FakeUfuncTwoArgs()
    not_equal = FakeUfuncTwoArgs()
    power = FakeUfuncTwoArgs()
    remainder = FakeUfuncTwoArgs()
    right_shift = FakeUfuncTwoArgs()
    subtract = FakeUfuncTwoArgs()
    true_divide = FakeUfuncTwoArgs()
        """.format(
            opt_args=ufunc_optional_keyword_arguments
        )
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
)
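For illustration (not part of the commit), the str.format call above stamps the shared optional keyword arguments into every fake __call__ before parsing; a minimal sketch that checks the resulting signature:

# Illustrative sketch only: the templated keyword arguments end up on __call__.
mod = numpy_core_umath_transform()
call = mod.locals["FakeUfuncTwoArgs"][0].locals["__call__"][0]
print([arg.name for arg in call.args.args])
# expected: ['self', 'x1', 'x2', 'out', 'where', 'casting', 'order', 'dtype', 'subok']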
@@ -0,0 +1,153 @@
# Copyright (c) 2015-2016, 2018-2019 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017-2020 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Astroid hooks for numpy ndarray class."""

import functools
import astroid


def infer_numpy_ndarray(node, context=None):
    ndarray = """
    class ndarray(object):
        def __init__(self, shape, dtype=float, buffer=None, offset=0,
                     strides=None, order=None):
            self.T = None
            self.base = None
            self.ctypes = None
            self.data = None
            self.dtype = None
            self.flags = None
            self.flat = None
            self.imag = np.ndarray([0, 0])
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = np.ndarray([0, 0])
            self.shape = numpy.ndarray([0, 0])
            self.size = None
            self.strides = None

        def __abs__(self): return numpy.ndarray([0, 0])
        def __add__(self, value): return numpy.ndarray([0, 0])
        def __and__(self, value): return numpy.ndarray([0, 0])
        def __array__(self, dtype=None): return numpy.ndarray([0, 0])
        def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
        def __contains__(self, key): return True
        def __copy__(self): return numpy.ndarray([0, 0])
        def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
        def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
        def __eq__(self, value): return numpy.ndarray([0, 0])
        def __float__(self): return 0.
        def __floordiv__(self): return numpy.ndarray([0, 0])
        def __ge__(self, value): return numpy.ndarray([0, 0])
        def __getitem__(self, key): return uninferable
        def __gt__(self, value): return numpy.ndarray([0, 0])
        def __iadd__(self, value): return numpy.ndarray([0, 0])
        def __iand__(self, value): return numpy.ndarray([0, 0])
        def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
        def __ilshift__(self, value): return numpy.ndarray([0, 0])
        def __imod__(self, value): return numpy.ndarray([0, 0])
        def __imul__(self, value): return numpy.ndarray([0, 0])
        def __int__(self): return 0
        def __invert__(self): return numpy.ndarray([0, 0])
        def __ior__(self, value): return numpy.ndarray([0, 0])
        def __ipow__(self, value): return numpy.ndarray([0, 0])
        def __irshift__(self, value): return numpy.ndarray([0, 0])
        def __isub__(self, value): return numpy.ndarray([0, 0])
        def __itruediv__(self, value): return numpy.ndarray([0, 0])
        def __ixor__(self, value): return numpy.ndarray([0, 0])
        def __le__(self, value): return numpy.ndarray([0, 0])
        def __len__(self): return 1
        def __lshift__(self, value): return numpy.ndarray([0, 0])
        def __lt__(self, value): return numpy.ndarray([0, 0])
        def __matmul__(self, value): return numpy.ndarray([0, 0])
        def __mod__(self, value): return numpy.ndarray([0, 0])
        def __mul__(self, value): return numpy.ndarray([0, 0])
        def __ne__(self, value): return numpy.ndarray([0, 0])
        def __neg__(self): return numpy.ndarray([0, 0])
        def __or__(self): return numpy.ndarray([0, 0])
        def __pos__(self): return numpy.ndarray([0, 0])
        def __pow__(self): return numpy.ndarray([0, 0])
        def __repr__(self): return str()
        def __rshift__(self): return numpy.ndarray([0, 0])
        def __setitem__(self, key, value): return uninferable
        def __str__(self): return str()
        def __sub__(self, value): return numpy.ndarray([0, 0])
        def __truediv__(self, value): return numpy.ndarray([0, 0])
        def __xor__(self, value): return numpy.ndarray([0, 0])
        def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
        def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
        def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
        def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
        def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
        def byteswap(self, inplace=False): return np.ndarray([0, 0])
        def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
        def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
        def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
        def conj(self): return np.ndarray([0, 0])
        def conjugate(self): return np.ndarray([0, 0])
        def copy(self, order='C'): return np.ndarray([0, 0])
        def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
        def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
        def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
        def dot(self, b, out=None): return np.ndarray([0, 0])
        def dump(self, file): return None
        def dumps(self): return str()
        def fill(self, value): return None
        def flatten(self, order='C'): return np.ndarray([0, 0])
        def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
        def item(self, *args): return uninferable
        def itemset(self, *args): return None
        def max(self, axis=None, out=None): return np.ndarray([0, 0])
        def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
        def nonzero(self): return (1,)
        def partition(self, kth, axis=-1, kind='introselect', order=None): return None
        def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
        def put(self, indices, values, mode='raise'): return None
        def ravel(self, order='C'): return np.ndarray([0, 0])
        def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
        def reshape(self, shape, order='C'): return np.ndarray([0, 0])
        def resize(self, new_shape, refcheck=True): return None
        def round(self, decimals=0, out=None): return np.ndarray([0, 0])
        def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
        def setfield(self, val, dtype, offset=0): return None
        def setflags(self, write=None, align=None, uic=None): return None
        def sort(self, axis=-1, kind='quicksort', order=None): return None
        def squeeze(self, axis=None): return np.ndarray([0, 0])
        def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
        def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
        def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
        def tobytes(self, order='C'): return b''
        def tofile(self, fid, sep="", format="%s"): return None
        def tolist(self, ): return []
        def tostring(self, order='C'): return b''
        def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
        def transpose(self, *axes): return np.ndarray([0, 0])
        def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
        def view(self, dtype=None, type=None): return np.ndarray([0, 0])
    """
    node = astroid.extract_node(ndarray)
    return node.infer(context=context)


def _looks_like_numpy_ndarray(node):
    return isinstance(node, astroid.Attribute) and node.attrname == "ndarray"


astroid.MANAGER.register_transform(
    astroid.Attribute,
    astroid.inference_tip(infer_numpy_ndarray),
    _looks_like_numpy_ndarray,
)
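For illustration (not part of the commit): because most ndarray methods in the stub return another ndarray, chained expressions keep inferring to ndarray instances. A minimal sketch, assuming this astroid checkout is importable:

# Illustrative sketch only: a chained ndarray expression stays inferable.
import astroid

expr = astroid.extract_node(
    """
    import numpy as np
    np.ndarray((2, 2)).reshape(4)  #@
    """
)
print(next(expr.infer()).pytype())  # expected to end in '.ndarray'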
@@ -0,0 +1,70 @@
# Copyright (c) 2019 hippo91 <guillaume.peillex@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

# TODO(hippo91) : correct the functions return types
"""Astroid hooks for numpy.random.mtrand module."""

import astroid


def numpy_random_mtrand_transform():
    return astroid.parse(
        """
    def beta(a, b, size=None): return uninferable
    def binomial(n, p, size=None): return uninferable
    def bytes(length): return uninferable
    def chisquare(df, size=None): return uninferable
    def choice(a, size=None, replace=True, p=None): return uninferable
    def dirichlet(alpha, size=None): return uninferable
    def exponential(scale=1.0, size=None): return uninferable
    def f(dfnum, dfden, size=None): return uninferable
    def gamma(shape, scale=1.0, size=None): return uninferable
    def geometric(p, size=None): return uninferable
    def get_state(): return uninferable
    def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
    def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
    def laplace(loc=0.0, scale=1.0, size=None): return uninferable
    def logistic(loc=0.0, scale=1.0, size=None): return uninferable
    def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
    def logseries(p, size=None): return uninferable
    def multinomial(n, pvals, size=None): return uninferable
    def multivariate_normal(mean, cov, size=None): return uninferable
    def negative_binomial(n, p, size=None): return uninferable
    def noncentral_chisquare(df, nonc, size=None): return uninferable
    def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
    def normal(loc=0.0, scale=1.0, size=None): return uninferable
    def pareto(a, size=None): return uninferable
    def permutation(x): return uninferable
    def poisson(lam=1.0, size=None): return uninferable
    def power(a, size=None): return uninferable
    def rand(*args): return uninferable
    def randint(low, high=None, size=None, dtype='l'):
        import numpy
        return numpy.ndarray((1,1))
    def randn(*args): return uninferable
    def random_integers(low, high=None, size=None): return uninferable
    def random_sample(size=None): return uninferable
    def rayleigh(scale=1.0, size=None): return uninferable
    def seed(seed=None): return uninferable
    def set_state(state): return uninferable
    def shuffle(x): return uninferable
    def standard_cauchy(size=None): return uninferable
    def standard_exponential(size=None): return uninferable
    def standard_gamma(shape, size=None): return uninferable
    def standard_normal(size=None): return uninferable
    def standard_t(df, size=None): return uninferable
    def triangular(left, mode, right, size=None): return uninferable
    def uniform(low=0.0, high=1.0, size=None): return uninferable
    def vonmises(mu, kappa, size=None): return uninferable
    def wald(mean, scale, size=None): return uninferable
    def weibull(a, size=None): return uninferable
    def zipf(a, size=None): return uninferable
        """
    )


astroid.register_module_extender(
    astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
)
@@ -0,0 +1,65 @@
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>
# Copyright (c) 2019 Claudiu Popa <pcmanticore@gmail.com>

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER


"""Different utilities for the numpy brains"""


import astroid


def infer_numpy_member(src, node, context=None):
    node = astroid.extract_node(src)
    return node.infer(context=context)


def _is_a_numpy_module(node: astroid.node_classes.Name) -> bool:
    """
    Returns True if the node is a representation of a numpy module.

    For example, in:
        import numpy as np
        x = np.linspace(1, 2)
    the node <Name.np> is a representation of the numpy module.

    :param node: node to test
    :return: True if the node is a representation of the numpy module.
    """
    module_nickname = node.name
    potential_import_target = [
        x for x in node.lookup(module_nickname)[1] if isinstance(x, astroid.Import)
    ]
    for target in potential_import_target:
        if ("numpy", module_nickname) in target.names:
            return True
    return False


def looks_like_numpy_member(
    member_name: str, node: astroid.node_classes.NodeNG
) -> bool:
    """
    Returns True if the node is a member of numpy whose
    name is member_name.

    :param member_name: name of the member
    :param node: node to test
    :return: True if the node is a member of numpy
    """
    if (
        isinstance(node, astroid.Attribute)
        and node.attrname == member_name
        and isinstance(node.expr, astroid.Name)
        and _is_a_numpy_module(node.expr)
    ):
        return True
    if (
        isinstance(node, astroid.Name)
        and node.name == member_name
        and node.root().name.startswith("numpy")
    ):
        return True
    return False
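For illustration (not part of the commit), the two helpers above can be exercised directly on a small snippet; a minimal sketch using the functions defined in this file:

# Illustrative sketch only: looks_like_numpy_member matches the Attribute
# node np.linspace but not the bare Name node np.
import astroid

attr = astroid.extract_node(
    """
    import numpy as np
    np.linspace  #@
    """
)
print(looks_like_numpy_member("linspace", attr))       # expected: True
print(looks_like_numpy_member("linspace", attr.expr))  # expected: False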
Some files were not shown because too many files have changed in this diff