author     Anthony Sottile <asottile@umich.edu>    2022-09-16 00:14:09 -0400
committer  GitHub <noreply@github.com>             2022-09-16 00:14:09 -0400
commit     04ecb0c324ef3b61124e2f80f9e1af6c3a4c7b26 (patch)
tree       99b73ea74e99a27ab09f59ea4b42f06c7f850749
parent     4a2407d7aa0834c37c9fe97e2db37fa8d01caa5b (diff)
remove handling of `# type:` comments now that pyflakes is py3+ (#684)
-rw-r--r--  pyflakes/api.py                            3
-rw-r--r--  pyflakes/checker.py                      108
-rw-r--r--  pyflakes/messages.py                       8
-rw-r--r--  pyflakes/test/harness.py                   5
-rw-r--r--  pyflakes/test/test_checker.py            184
-rw-r--r--  pyflakes/test/test_type_annotations.py   149
-rw-r--r--  pyflakes/test/test_undefined_names.py      3
7 files changed, 30 insertions, 430 deletions
diff --git a/pyflakes/api.py b/pyflakes/api.py
index b701ec5..cd4d943 100644
--- a/pyflakes/api.py
+++ b/pyflakes/api.py
@@ -44,8 +44,7 @@ def check(codeString, filename, reporter=None):
reporter.unexpectedError(filename, 'problem decoding source')
return 1
# Okay, it's syntactically valid. Now check it.
- file_tokens = checker.make_tokens(codeString)
- w = checker.Checker(tree, file_tokens=file_tokens, filename=filename)
+ w = checker.Checker(tree, filename=filename)
w.messages.sort(key=lambda m: m.lineno)
for warning in w.messages:
reporter.flake(warning)
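
With `make_tokens` gone from the public entry point, a caller only needs the parsed tree. A minimal sketch of driving the checker directly, mirroring the new `api.check` body above (the source snippet and file name are placeholders):

    import ast

    from pyflakes import checker

    source = "import os\n"                      # placeholder snippet
    tree = ast.parse(source, filename="example.py")

    # No tokenize/make_tokens/file_tokens plumbing any more.
    w = checker.Checker(tree, filename="example.py")
    w.messages.sort(key=lambda m: m.lineno)
    for warning in w.messages:
        print(warning)                          # e.g. "'os' imported but unused"
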
diff --git a/pyflakes/checker.py b/pyflakes/checker.py
index 89c9d0a..29d15d3 100644
--- a/pyflakes/checker.py
+++ b/pyflakes/checker.py
@@ -7,8 +7,6 @@ Also, it models the Bindings and Scopes.
import __future__
import builtins
import ast
-import bisect
-import collections
import contextlib
import doctest
import functools
@@ -16,7 +14,7 @@ import os
import re
import string
import sys
-import tokenize
+import warnings
from pyflakes import messages
@@ -78,16 +76,6 @@ def _is_name_or_attr(node, name): # type: (ast.AST, str) -> bool
)
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L102-L104
-TYPE_COMMENT_RE = re.compile(r'^#\s*type:\s*')
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L1408-L1413
-ASCII_NON_ALNUM = ''.join([chr(i) for i in range(128) if not chr(i).isalnum()])
-TYPE_IGNORE_RE = re.compile(
- TYPE_COMMENT_RE.pattern + fr'ignore([{ASCII_NON_ALNUM}]|$)')
-# https://github.com/python/typed_ast/blob/1.4.0/ast27/Grammar/Grammar#L147
-TYPE_FUNC_RE = re.compile(r'^(\(.*?\))\s*->\s*(.*)$')
-
-
MAPPING_KEY_RE = re.compile(r'\(([^()]*)\)')
CONVERSION_FLAG_RE = re.compile('[#0+ -]*')
WIDTH_RE = re.compile(r'(?:\*|\d*)')
@@ -623,13 +611,6 @@ class DoctestScope(ModuleScope):
"""Scope for a doctest."""
-class DummyNode:
- """Used in place of an `ast.AST` to set error message positions"""
- def __init__(self, lineno, col_offset):
- self.lineno = lineno
- self.col_offset = col_offset
-
-
class DetectClassScopedMagic:
names = dir()
@@ -749,63 +730,6 @@ def in_string_annotation(func):
return in_annotation_func
-def make_tokens(code):
- # PY3: tokenize.tokenize requires readline of bytes
- if not isinstance(code, bytes):
- code = code.encode('UTF-8')
- lines = iter(code.splitlines(True))
- # next(lines, b'') is to prevent an error in pypy3
- return tuple(tokenize.tokenize(lambda: next(lines, b'')))
-
-
-class _TypeableVisitor(ast.NodeVisitor):
- """Collect the line number and nodes which are deemed typeable by
- PEP 484
-
- https://www.python.org/dev/peps/pep-0484/#type-comments
- """
- def __init__(self):
- self.typeable_lines = []
- self.typeable_nodes = {}
-
- def _typeable(self, node):
- # if there is more than one typeable thing on a line last one wins
- self.typeable_lines.append(node.lineno)
- self.typeable_nodes[node.lineno] = node
-
- self.generic_visit(node)
-
- visit_Assign = visit_For = visit_FunctionDef = visit_With = _typeable
- visit_AsyncFor = visit_AsyncFunctionDef = visit_AsyncWith = _typeable
-
-
-def _collect_type_comments(tree, tokens):
- visitor = _TypeableVisitor()
- visitor.visit(tree)
-
- type_comments = collections.defaultdict(list)
- for tp, text, start, _, _ in tokens:
- if (
- tp != tokenize.COMMENT or # skip non comments
- not TYPE_COMMENT_RE.match(text) or # skip non-type comments
- TYPE_IGNORE_RE.match(text) # skip ignores
- ):
- continue
-
- # search for the typeable node at or before the line number of the
- # type comment.
- # if the bisection insertion point is before any nodes this is an
- # invalid type comment which is ignored.
- lineno, _ = start
- idx = bisect.bisect_right(visitor.typeable_lines, lineno)
- if idx == 0:
- continue
- node = visitor.typeable_nodes[visitor.typeable_lines[idx - 1]]
- type_comments[node].append((start, text))
-
- return type_comments
-
-
class Checker:
"""
I check the cleanliness and sanity of Python code.
@@ -842,9 +766,6 @@ class Checker:
builtIns.update(_customBuiltIns.split(','))
del _customBuiltIns
- # TODO: file_tokens= is required to perform checks on type comments,
- # eventually make this a required positional argument. For now it
- # is defaulted to `()` for api compatibility.
def __init__(self, tree, filename='(none)', builtins=None,
withDoctest='PYFLAKES_DOCTEST' in os.environ, file_tokens=()):
self._nodeHandlers = {}
@@ -862,7 +783,6 @@ class Checker:
raise RuntimeError('No scope implemented for the node %r' % tree)
self.exceptHandlers = [()]
self.root = tree
- self._type_comments = _collect_type_comments(tree, file_tokens)
for builtin in self.builtIns:
self.addBinding(None, Builtin(builtin))
self.handleChildren(tree)
@@ -879,6 +799,12 @@ class Checker:
self.popScope()
self.checkDeadScopes()
+ if file_tokens:
+ warnings.warn(
+ '`file_tokens` will be removed in a future version',
+ stacklevel=2,
+ )
+
def deferFunction(self, callable):
"""
Schedule a function handler to be called just before completion.
@@ -1308,27 +1234,7 @@ class Checker:
self.annotationsFutureEnabled
)
- def _handle_type_comments(self, node):
- for (lineno, col_offset), comment in self._type_comments.get(node, ()):
- comment = comment.split(':', 1)[1].strip()
- func_match = TYPE_FUNC_RE.match(comment)
- if func_match:
- parts = (
- func_match.group(1).replace('*', ''),
- func_match.group(2).strip(),
- )
- else:
- parts = (comment,)
-
- for part in parts:
- self.deferFunction(functools.partial(
- self.handleStringAnnotation,
- part, DummyNode(lineno, col_offset), lineno, col_offset,
- messages.CommentAnnotationSyntaxError,
- ))
-
def handleChildren(self, tree, omit=None):
- self._handle_type_comments(tree)
for node in iter_child_nodes(tree, omit=omit):
self.handleNode(node, tree)
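
`file_tokens=` is still accepted for backwards compatibility, but after this commit it only triggers the warning added at the end of `__init__`. A small sketch of observing that behaviour (the dummy token tuple is a placeholder):

    import ast
    import warnings

    from pyflakes import checker

    tree = ast.parse("x = 1\n")

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        checker.Checker(tree, file_tokens=("dummy",))   # any truthy value

    assert any("file_tokens" in str(w.message) for w in caught)
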
diff --git a/pyflakes/messages.py b/pyflakes/messages.py
index c2246cf..f45fd46 100644
--- a/pyflakes/messages.py
+++ b/pyflakes/messages.py
@@ -248,14 +248,6 @@ class ForwardAnnotationSyntaxError(Message):
self.message_args = (annotation,)
-class CommentAnnotationSyntaxError(Message):
- message = 'syntax error in type comment %r'
-
- def __init__(self, filename, loc, annotation):
- Message.__init__(self, filename, loc)
- self.message_args = (annotation,)
-
-
class RaiseNotImplemented(Message):
message = "'raise NotImplemented' should be 'raise NotImplementedError'"
diff --git a/pyflakes/test/harness.py b/pyflakes/test/harness.py
index 9bcc0bb..863921e 100644
--- a/pyflakes/test/harness.py
+++ b/pyflakes/test/harness.py
@@ -16,13 +16,10 @@ class TestCase(unittest.TestCase):
def flakes(self, input, *expectedOutputs, **kw):
tree = ast.parse(textwrap.dedent(input))
- file_tokens = checker.make_tokens(textwrap.dedent(input))
if kw.get('is_segment'):
tree = tree.body[0]
kw.pop('is_segment')
- w = checker.Checker(
- tree, file_tokens=file_tokens, withDoctest=self.withDoctest, **kw
- )
+ w = checker.Checker(tree, withDoctest=self.withDoctest, **kw)
outputs = [type(o) for o in w.messages]
expectedOutputs = list(expectedOutputs)
outputs.sort(key=lambda t: t.__name__)
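
A test now hands `flakes()` just the source and the expected message types. A hypothetical test case sketched against the harness above (the class and test names are made up for illustration):

    from pyflakes import messages as m
    from pyflakes.test.harness import TestCase


    class ExampleTests(TestCase):
        def test_unused_import(self):
            # flakes() parses the source and runs Checker; no token plumbing.
            self.flakes("import os", m.UnusedImport)

        def test_clean_code(self):
            self.flakes("import os\nprint(os.sep)")
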
diff --git a/pyflakes/test/test_checker.py b/pyflakes/test/test_checker.py
deleted file mode 100644
index 66e3501..0000000
--- a/pyflakes/test/test_checker.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import ast
-
-from pyflakes import checker
-from pyflakes.test.harness import TestCase
-
-
-class TypeableVisitorTests(TestCase):
- """
- Tests of L{_TypeableVisitor}
- """
-
- @staticmethod
- def _run_visitor(s):
- """
- Run L{_TypeableVisitor} on the parsed source and return the visitor.
- """
- tree = ast.parse(s)
- visitor = checker._TypeableVisitor()
- visitor.visit(tree)
- return visitor
-
- def test_node_types(self):
- """
- Test that the typeable node types are collected
- """
- visitor = self._run_visitor(
- """\
-x = 1 # assignment
-for x in range(1): pass # for loop
-def f(): pass # function definition
-with a as b: pass # with statement
-"""
- )
- self.assertEqual(visitor.typeable_lines, [1, 2, 3, 4])
- self.assertIsInstance(visitor.typeable_nodes[1], ast.Assign)
- self.assertIsInstance(visitor.typeable_nodes[2], ast.For)
- self.assertIsInstance(visitor.typeable_nodes[3], ast.FunctionDef)
- self.assertIsInstance(visitor.typeable_nodes[4], ast.With)
-
- def test_visitor_recurses(self):
- """
- Test the common pitfall of missing `generic_visit` in visitors by
- ensuring that nested nodes are reported
- """
- visitor = self._run_visitor(
- """\
-def f():
- x = 1
-"""
- )
- self.assertEqual(visitor.typeable_lines, [1, 2])
- self.assertIsInstance(visitor.typeable_nodes[1], ast.FunctionDef)
- self.assertIsInstance(visitor.typeable_nodes[2], ast.Assign)
-
- def test_py35_node_types(self):
- """
- Test that the PEP 492 node types are collected
- """
- visitor = self._run_visitor(
- """\
-async def f(): # async def
- async for x in y: pass # async for
- async with a as b: pass # async with
-"""
- )
- self.assertEqual(visitor.typeable_lines, [1, 2, 3])
- self.assertIsInstance(visitor.typeable_nodes[1], ast.AsyncFunctionDef)
- self.assertIsInstance(visitor.typeable_nodes[2], ast.AsyncFor)
- self.assertIsInstance(visitor.typeable_nodes[3], ast.AsyncWith)
-
- def test_last_node_wins(self):
- """
- Test that when two typeable nodes are present on a line, the last
- typeable one wins.
- """
- visitor = self._run_visitor('x = 1; y = 1')
- # detected both assignable nodes
- self.assertEqual(visitor.typeable_lines, [1, 1])
- # but the assignment to `y` wins
- self.assertEqual(visitor.typeable_nodes[1].targets[0].id, 'y')
-
-
-class CollectTypeCommentsTests(TestCase):
- """
- Tests of L{_collect_type_comments}
- """
-
- @staticmethod
- def _collect(s):
- """
- Run L{_collect_type_comments} on the parsed source and return the
- mapping from nodes to comments. The return value is converted to
- a set: {(node_type, tuple of comments), ...}
- """
- tree = ast.parse(s)
- tokens = checker.make_tokens(s)
- ret = checker._collect_type_comments(tree, tokens)
- return {(type(k), tuple(s for _, s in v)) for k, v in ret.items()}
-
- def test_bytes(self):
- """
- Test that the function works for binary source
- """
- ret = self._collect(b'x = 1 # type: int')
- self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
-
- def test_text(self):
- """
- Test that the function works for text source
- """
- ret = self._collect('x = 1 # type: int')
- self.assertEqual(ret, {(ast.Assign, ('# type: int',))})
-
- def test_non_type_comment_ignored(self):
- """
- Test that a non-type comment is ignored
- """
- ret = self._collect('x = 1 # noqa')
- self.assertSetEqual(ret, set())
-
- def test_type_comment_before_typeable(self):
- """
- Test that a type comment before something typeable is ignored.
- """
- ret = self._collect('# type: int\nx = 1')
- self.assertSetEqual(ret, set())
-
- def test_type_ignore_comment_ignored(self):
- """
- Test that `# type: ignore` comments are not collected.
- """
- ret = self._collect('x = 1 # type: ignore')
- self.assertSetEqual(ret, set())
-
- def test_type_ignore_with_other_things_ignored(self):
- """
- Test that `# type: ignore` comments with more content are also not
- collected.
- """
- ret = self._collect('x = 1 # type: ignore # noqa')
- self.assertSetEqual(ret, set())
- ret = self._collect('x = 1 #type:ignore#noqa')
- self.assertSetEqual(ret, set())
-
- def test_type_comment_with_extra_still_collected(self):
- ret = self._collect('x = 1 # type: int # noqa')
- self.assertSetEqual(ret, {(ast.Assign, ('# type: int # noqa',))})
-
- def test_type_comment_without_whitespace(self):
- ret = self._collect('x = 1 #type:int')
- self.assertSetEqual(ret, {(ast.Assign, ('#type:int',))})
-
- def test_type_comment_starts_with_word_ignore(self):
- ret = self._collect('x = 1 # type: ignore[T]')
- self.assertSetEqual(ret, set())
-
- def test_last_node_wins(self):
- """
- Test that when two typeable nodes are present on a line, the last
- typeable one wins.
- """
- ret = self._collect('def f(): x = 1 # type: int')
- self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
-
- def test_function_def_assigned_comments(self):
- """
- Test that type comments for function arguments are all attributed to
- the function definition.
- """
- ret = self._collect(
- """\
-def f(
- a, # type: int
- b, # type: str
-):
- # type: (...) -> None
- pass
-"""
- )
- expected = {(
- ast.FunctionDef,
- ('# type: int', '# type: str', '# type: (...) -> None'),
- )}
- self.assertSetEqual(ret, expected)
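
For background only: this commit drops type-comment handling outright rather than porting it, but the standard library's parser (Python 3.8+) can surface the same information the deleted tokenize-based helpers collected. A minimal sketch:

    import ast

    SOURCE = "x = 1  # type: int\ny = 2  # type: ignore[misc]\n"

    # type_comments=True attaches each comment to the statement it follows.
    tree = ast.parse(SOURCE, type_comments=True)

    for node in ast.walk(tree):
        comment = getattr(node, "type_comment", None)
        if comment is not None:
            print(type(node).__name__, comment)      # Assign int

    # "# type: ignore" comments are collected separately on the module.
    for ignore in tree.type_ignores:
        print("ignore at line", ignore.lineno, "tag:", ignore.tag)
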
diff --git a/pyflakes/test/test_type_annotations.py b/pyflakes/test/test_type_annotations.py
index 2ad9f45..885302c 100644
--- a/pyflakes/test/test_type_annotations.py
+++ b/pyflakes/test/test_type_annotations.py
@@ -17,22 +17,22 @@ class TestTypeAnnotations(TestCase):
from typing import overload
@overload
- def f(s): # type: (None) -> None
+ def f(s: None) -> None:
pass
@overload
- def f(s): # type: (int) -> int
+ def f(s: int) -> int:
pass
def f(s):
return s
@typing.overload
- def g(s): # type: (None) -> None
+ def g(s: None) -> None:
pass
@typing.overload
- def g(s): # type: (int) -> int
+ def g(s: int) -> int:
pass
def g(s):
@@ -46,22 +46,22 @@ class TestTypeAnnotations(TestCase):
from typing_extensions import overload
@overload
- def f(s): # type: (None) -> None
+ def f(s: None) -> None:
pass
@overload
- def f(s): # type: (int) -> int
+ def f(s: int) -> int:
pass
def f(s):
return s
@typing_extensions.overload
- def g(s): # type: (None) -> None
+ def g(s: None) -> None:
pass
@typing_extensions.overload
- def g(s): # type: (int) -> int
+ def g(s: int) -> int:
pass
def g(s):
@@ -74,11 +74,11 @@ class TestTypeAnnotations(TestCase):
from typing import overload
@overload
- async def f(s): # type: (None) -> None
+ async def f(s: None) -> None:
pass
@overload
- async def f(s): # type: (int) -> int
+ async def f(s: int) -> int:
pass
async def f(s):
@@ -92,12 +92,12 @@ class TestTypeAnnotations(TestCase):
@dec
@overload
- def f(x): # type: (int) -> int
+ def f(x: int) -> int:
pass
@dec
@overload
- def f(x): # type: (str) -> str
+ def f(x: str) -> str:
pass
@dec
@@ -110,11 +110,11 @@ class TestTypeAnnotations(TestCase):
class C:
@overload
- def f(self, x): # type: (int) -> int
+ def f(self, x: int) -> int:
pass
@overload
- def f(self, x): # type: (str) -> str
+ def f(self, x: str) -> str:
pass
def f(self, x): return x
@@ -126,11 +126,11 @@ class TestTypeAnnotations(TestCase):
import typing as t
@t.overload
- def f(s): # type: (None) -> None
+ def f(s: None) -> None:
pass
@t.overload
- def f(s): # type: (int) -> int
+ def f(s: int) -> int:
pass
def f(s):
@@ -416,115 +416,6 @@ class TestTypeAnnotations(TestCase):
__all__: List[str]
''')
- def test_typeCommentsMarkImportsAsUsed(self):
- self.flakes("""
- from mod import A, B, C, D, E, F, G
-
-
- def f(
- a, # type: A
- ):
- # type: (...) -> B
- for b in a: # type: C
- with b as c: # type: D
- d = c.x # type: E
- return d
-
-
- def g(x): # type: (F) -> G
- return x.y
- """)
-
- def test_typeCommentsFullSignature(self):
- self.flakes("""
- from mod import A, B, C, D
- def f(a, b):
- # type: (A, B[C]) -> D
- return a + b
- """)
-
- def test_typeCommentsStarArgs(self):
- self.flakes("""
- from mod import A, B, C, D
- def f(a, *b, **c):
- # type: (A, *B, **C) -> D
- return a + b
- """)
-
- def test_typeCommentsFullSignatureWithDocstring(self):
- self.flakes('''
- from mod import A, B, C, D
- def f(a, b):
- # type: (A, B[C]) -> D
- """do the thing!"""
- return a + b
- ''')
-
- def test_typeCommentsAdditionalComment(self):
- self.flakes("""
- from mod import F
-
- x = 1 # type: F # noqa
- """)
-
- def test_typeCommentsNoWhitespaceAnnotation(self):
- self.flakes("""
- from mod import F
-
- x = 1 #type:F
- """)
-
- def test_typeCommentsInvalidDoesNotMarkAsUsed(self):
- self.flakes("""
- from mod import F
-
- # type: F
- """, m.UnusedImport)
-
- def test_typeCommentsSyntaxError(self):
- self.flakes("""
- def f(x): # type: (F[) -> None
- pass
- """, m.CommentAnnotationSyntaxError)
-
- def test_typeCommentsSyntaxErrorCorrectLine(self):
- checker = self.flakes("""\
- x = 1
- # type: definitely not a PEP 484 comment
- """, m.CommentAnnotationSyntaxError)
- self.assertEqual(checker.messages[0].lineno, 2)
-
- def test_typeCommentsAssignedToPreviousNode(self):
- # This test demonstrates an issue in the implementation which
- # associates the type comment with a node above it, however the type
- # comment isn't valid according to mypy. If an improved approach
- # which can detect these "invalid" type comments is implemented, this
- # test should be removed / improved to assert that new check.
- self.flakes("""
- from mod import F
- x = 1
- # type: F
- """)
-
- def test_typeIgnore(self):
- self.flakes("""
- a = 0 # type: ignore
- b = 0 # type: ignore[excuse]
- c = 0 # type: ignore=excuse
- d = 0 # type: ignore [excuse]
- e = 0 # type: ignore whatever
- """)
-
- def test_typeIgnoreBogus(self):
- self.flakes("""
- x = 1 # type: ignored
- """, m.UndefinedName)
-
- def test_typeIgnoreBogusUnicode(self):
- self.flakes("""
- x = 2 # type: ignore\xc3
- """, m.UndefinedName)
-
def test_return_annotation_is_class_scope_variable(self):
self.flakes("""
from typing import TypeVar
@@ -714,7 +605,7 @@ class TestTypeAnnotations(TestCase):
if TYPE_CHECKING:
from t import T
- def f(): # type: () -> T
+ def f() -> T:
pass
""")
# False: the old, more-compatible approach
@@ -722,7 +613,7 @@ class TestTypeAnnotations(TestCase):
if False:
from t import T
- def f(): # type: () -> T
+ def f() -> T:
pass
""")
# some choose to assign a constant and do it that way
@@ -732,7 +623,7 @@ class TestTypeAnnotations(TestCase):
if MYPY:
from t import T
- def f(): # type: () -> T
+ def f() -> T:
pass
""")
@@ -746,7 +637,7 @@ class TestTypeAnnotations(TestCase):
Protocol = object
class C(Protocol):
- def f(): # type: () -> int
+ def f() -> int:
pass
""")
diff --git a/pyflakes/test/test_undefined_names.py b/pyflakes/test/test_undefined_names.py
index f3b89ea..c2d2d87 100644
--- a/pyflakes/test/test_undefined_names.py
+++ b/pyflakes/test/test_undefined_names.py
@@ -814,7 +814,6 @@ class NameTests(TestCase):
raised.
"""
tree = ast.parse("x = 10")
- file_tokens = checker.make_tokens("x = 10")
# Make it into something unrecognizable.
tree.body[0].targets[0].ctx = object()
- self.assertRaises(RuntimeError, checker.Checker, tree, file_tokens=file_tokens)
+ self.assertRaises(RuntimeError, checker.Checker, tree)
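
The last hunk keeps the behaviour being tested: an AST containing an unrecognizable node still makes the constructor raise. A sketch of the same scenario outside the test suite:

    import ast

    from pyflakes import checker

    tree = ast.parse("x = 10")
    # Corrupt the expression context, as the test above does.
    tree.body[0].targets[0].ctx = object()

    try:
        checker.Checker(tree)
    except RuntimeError as exc:
        print("rejected:", exc)
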