diff options
author | Torsten Marek <tmarek@google.com> | 2013-05-03 14:27:03 +0200 |
---|---|---|
committer | Torsten Marek <tmarek@google.com> | 2013-05-03 14:27:03 +0200 |
commit | 17fb1d38dc5123609c0d5064707654ddffdef349 (patch) | |
tree | 7c612798a5c1e972b71fee4cfb17a61e736775d8 | |
parent | 6d809a8ee479e662d5a614de66f27ff8f0b302b3 (diff) | |
download | pylint-17fb1d38dc5123609c0d5064707654ddffdef349.tar.gz |
Minor updates for the token checker interface, triggered by the review.
-rw-r--r-- | ChangeLog | 4 | ||||
-rw-r--r-- | interfaces.py | 4 | ||||
-rw-r--r-- | lint.py | 13 | ||||
-rw-r--r-- | utils.py | 2 |
4 files changed, 13 insertions, 10 deletions
@@ -4,6 +4,10 @@ ChangeLog for Pylint -- * bitbucket #6: put back documentation in source distribution + * Added a new base class and interface for checkers that work on the + tokens rather than the syntax, and only tokenize the input file + once. + 2013-04-25 -- 0.28.0 * bitbucket #1: fix "dictionary changed size during iteration" crash diff --git a/interfaces.py b/interfaces.py index 3d8eb18..a24e36f 100644 --- a/interfaces.py +++ b/interfaces.py @@ -51,11 +51,11 @@ class IRawChecker(IChecker): class ITokenChecker(IChecker): - """Interface for checkers that need access to the token stream.""" + """Interface for checkers that need access to the token list.""" def process_tokens(self, tokens): """Process a module. - tokens contains the token stream. + tokens is a list of all source code tokens in the file. """ @@ -565,10 +565,9 @@ This is used by the global evaluation report (RP0004).'}), files_or_modules = (files_or_modules,) walker = PyLintASTWalker(self) checkers = self.prepare_checkers() - token_checkers = [c for c in checkers if implements(c, ITokenChecker) - and c is not self] - rawcheckers = [c for c in checkers if implements(c, IRawChecker) - and c is not self] + tokencheckers = [c for c in checkers if implements(c, ITokenChecker) + and c is not self] + rawcheckers = [c for c in checkers if implements(c, IRawChecker)] # notify global begin for checker in checkers: checker.open() @@ -591,7 +590,7 @@ This is used by the global evaluation report (RP0004).'}), # fix the current file (if the source file was not available or # if it's actually a c extension) self.current_file = astng.file - self.check_astng_module(astng, walker, rawcheckers, token_checkers) + self.check_astng_module(astng, walker, rawcheckers, tokencheckers) self._add_suppression_messages() # notify global end self.set_current_module('') @@ -647,7 +646,7 @@ This is used by the global evaluation report (RP0004).'}), traceback.print_exc() self.add_message('F0002', args=(ex.__class__, ex)) - def check_astng_module(self, astng, walker, rawcheckers, token_checkers): + def check_astng_module(self, astng, walker, rawcheckers, tokencheckers): """check a module from its astng representation, real work""" # call raw checkers if possible tokens = tokenize_module(astng) @@ -670,7 +669,7 @@ This is used by the global evaluation report (RP0004).'}), self.collect_block_lines(astng, orig_state) for checker in rawcheckers: checker.process_module(astng) - for checker in token_checkers: + for checker in tokencheckers: checker.process_tokens(tokens) # generate events to astng checkers walker.walk(astng) @@ -159,7 +159,7 @@ class MessagesHandlerMixIn: chkid = None for msgid, msg_tuple in msgs_dict.iteritems(): - if implements(checker, IRawChecker) or implements(checker, ITokenChecker): + if implements(checker, (IRawChecker, ITokenChecker)): scope = WarningScope.LINE else: scope = WarningScope.NODE |