path: root/astroid/inference.py
author     Marc Mueller <30130371+cdce8p@users.noreply.github.com>  2022-06-25 15:15:35 +0200
committer  GitHub <noreply@github.com>  2022-06-25 15:15:35 +0200
commit     257f4e776646521995a8d2b4b500543a6c0337c0 (patch)
tree       1a3ff40cc0d81ca25f00af0be0c64c9a0b2622a9 /astroid/inference.py
parent     8e7174a99a9b9dde848632f99b0498f8d67999f3 (diff)
Revert "Type ``_infer`` of Attribute, AssignAttr, Global and Subscript (#1657)" (#1666)
This reverts commit 90362ad816c0e3ac586519edb5bc9d9cd7b384cb.
Diffstat (limited to 'astroid/inference.py')
-rw-r--r--  astroid/inference.py  49
1 file changed, 25 insertions(+), 24 deletions(-)
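
Before the diff itself, a minimal sketch of the wiring pattern the hunks below toggle: applying a generator decorator at definition time versus wrapping the plain function explicitly where it is assigned to the node class. Everything in the sketch is hypothetical (raise_if_empty and trace_path merely stand in for decorators.raise_if_nothing_inferred and decorators.path_wrapper); it only illustrates that the two styles behave the same, which is why the revert can move raise_if_nothing_inferred out of the decorator stack and back into the explicit wrapping at the _infer / infer_lhs assignments.

from functools import wraps

def raise_if_empty(func):
    # Hypothetical stand-in for decorators.raise_if_nothing_inferred.
    @wraps(func)
    def wrapped(*args, **kwargs):
        results = list(func(*args, **kwargs))
        if not results:
            raise ValueError("nothing inferred")
        yield from results
    return wrapped

def trace_path(func):
    # Hypothetical stand-in for decorators.path_wrapper (a no-op here).
    @wraps(func)
    def wrapped(*args, **kwargs):
        yield from func(*args, **kwargs)
    return wrapped

# Style removed by this revert: decorate the generator at definition time.
@raise_if_empty
@trace_path
def infer_decorated(values):
    yield from values

# Style restored by this revert: keep the function plain and apply the
# wrappers explicitly where it is attached to the node class, so a variant
# without the path wrapper can be built from the same function (cf. infer_lhs,
# which the diff notes "won't work with a path wrapper").
def infer_plain(values):
    yield from values

infer_full = raise_if_empty(trace_path(infer_plain))
infer_lhs = raise_if_empty(infer_plain)

assert list(infer_decorated([1, 2])) == list(infer_full([1, 2])) == [1, 2]
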
diff --git a/astroid/inference.py b/astroid/inference.py
index de78e303..9fe0aaec 100644
--- a/astroid/inference.py
+++ b/astroid/inference.py
@@ -277,7 +277,8 @@ def infer_import(
**kwargs: Any,
) -> Generator[nodes.Module, None, None]:
"""infer an Import node: return the imported module/object"""
- context = context or InferenceContext()
+ if not context:
+ raise InferenceError(node=self, context=context)
name = context.lookupname
if name is None:
raise InferenceError(node=self, context=context)
@@ -303,7 +304,8 @@ def infer_import_from(
**kwargs: Any,
) -> Generator[InferenceResult, None, None]:
"""infer a ImportFrom node: return the imported module/object"""
- context = context or InferenceContext()
+ if not context:
+ raise InferenceError(node=self, context=context)
name = context.lookupname
if name is None:
raise InferenceError(node=self, context=context)
@@ -332,19 +334,18 @@ def infer_import_from(
nodes.ImportFrom._infer = infer_import_from # type: ignore[assignment]
-@decorators.raise_if_nothing_inferred
-def infer_attribute(
- self: nodes.Attribute | nodes.AssignAttr,
- context: InferenceContext | None = None,
- **kwargs: Any,
-) -> Generator[InferenceResult, None, InferenceErrorInfo]:
+def infer_attribute(self, context=None):
"""infer an Attribute node by using getattr on the associated object"""
for owner in self.expr.infer(context):
if owner is util.Uninferable:
yield owner
continue
- context = copy_context(context)
+ if not context:
+ context = InferenceContext()
+ else:
+ context = copy_context(context)
+
old_boundnode = context.boundnode
try:
context.boundnode = owner
@@ -357,20 +358,20 @@ def infer_attribute(
pass
finally:
context.boundnode = old_boundnode
- return InferenceErrorInfo(node=self, context=context)
+ return dict(node=self, context=context)
-nodes.Attribute._infer = decorators.path_wrapper(infer_attribute) # type: ignore[assignment]
+nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
+ decorators.path_wrapper(infer_attribute)
+)
# won't work with a path wrapper
-nodes.AssignAttr.infer_lhs = infer_attribute
+nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
-def infer_global(
- self: nodes.Global, context: InferenceContext | None = None, **kwargs: Any
-) -> Generator[InferenceResult, None, None]:
- if context is None or context.lookupname is None:
+def infer_global(self, context=None):
+ if context.lookupname is None:
raise InferenceError(node=self, context=context)
try:
return bases._infer_stmts(self.root().getattr(context.lookupname), context)
@@ -386,10 +387,7 @@ nodes.Global._infer = infer_global # type: ignore[assignment]
_SUBSCRIPT_SENTINEL = object()
-@decorators.raise_if_nothing_inferred
-def infer_subscript(
- self: nodes.Subscript, context: InferenceContext | None = None, **kwargs: Any
-) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
+def infer_subscript(self, context=None):
"""Inference for subscripts
We're understanding if the index is a Const
@@ -441,12 +439,14 @@ def infer_subscript(
found_one = True
if found_one:
- return InferenceErrorInfo(node=self, context=context)
+ return dict(node=self, context=context)
return None
-nodes.Subscript._infer = decorators.path_wrapper(infer_subscript) # type: ignore[assignment]
-nodes.Subscript.infer_lhs = infer_subscript
+nodes.Subscript._infer = decorators.raise_if_nothing_inferred( # type: ignore[assignment]
+ decorators.path_wrapper(infer_subscript)
+)
+nodes.Subscript.infer_lhs = decorators.raise_if_nothing_inferred(infer_subscript)
@decorators.raise_if_nothing_inferred
@@ -981,7 +981,8 @@ def _infer_augassign(
self: nodes.AugAssign, context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
"""Inference logic for augmented binary operations."""
- context = context or InferenceContext()
+ if context is None:
+ context = InferenceContext()
rhs_context = context.clone()