diff options
author | Greg Ward <greg@gerg.ca> | 2013-11-08 14:37:25 -0500 |
---|---|---|
committer | Greg Ward <greg@gerg.ca> | 2013-11-08 16:19:26 -0500 |
commit | 99b8b4d9e487a17f012030b700fbf59aebd95680 (patch) | |
tree | 068e71c1e539636030f5ef642a35370cf92dffe3 | |
parent | 6fa11a1d255c48e5cda78b9582393eca08cb8efc (diff) | |
download | pep8-99b8b4d9e487a17f012030b700fbf59aebd95680.tar.gz |
Check physical lines after tokenizing them.
This will make it possible to treat physical lines in multiline
strings distinctly, e.g. for issue #224 and #242 (not addressed yet;
this is just a prerequisite).
Details:
- readline_check_physical() is gone: now we only need readline()
- but now setting self.indent_char in check_physical() comes too late,
so move that side effect up to readline()
- add maybe_check_physical() to decide after every token if it's time
to check physical lines and, if so, do it
-rwxr-xr-x | pep8.py | 38 |
1 files changed, 23 insertions, 15 deletions
@@ -1235,16 +1235,9 @@ class Checker(object): self.line_number += 1 if self.line_number > len(self.lines): return '' - return self.lines[self.line_number - 1] - - def readline_check_physical(self): - """ - Check and return the next physical line. This method can be - used to feed tokenize.generate_tokens. - """ - line = self.readline() - if line: - self.check_physical(line) + line = self.lines[self.line_number - 1] + if self.indent_char is None and line[:1] in WHITESPACE: + self.indent_char = line[0] return line def run_check(self, check, argument_names): @@ -1256,18 +1249,16 @@ class Checker(object): arguments.append(getattr(self, name)) return check(*arguments) - def check_physical(self, line): + def check_physical(self, line_number, line): """ Run all physical checks on a raw input line. """ self.physical_line = line - if self.indent_char is None and line[:1] in WHITESPACE: - self.indent_char = line[0] for name, check, argument_names in self._physical_checks: result = self.run_check(check, argument_names) if result is not None: offset, text = result - self.report_error(self.line_number, offset, text, check) + self.report_error(line_number, offset, text, check) def build_tokens_line(self): """ @@ -1350,13 +1341,30 @@ class Checker(object): def generate_tokens(self): if self._io_error: self.report_error(1, 0, 'E902 %s' % self._io_error, readlines) - tokengen = tokenize.generate_tokens(self.readline_check_physical) + tokengen = tokenize.generate_tokens(self.readline) try: for token in tokengen: yield token + self.maybe_check_physical(token) except (SyntaxError, tokenize.TokenError): self.report_invalid_syntax() + def maybe_check_physical(self, token): + """ + If token calls for it, check current physical line(s). + """ + if token[0] == tokenize.STRING and token[1].count('\n'): + # Check the physical lines that make up a multiline string. Do + # *not* check the last line: its newline is outside of the + # multiline string, so we consider it a regular physical line + # (it will be checked when we see the newline token). + line_number = token[2][0] + for line in token[1].split('\n')[:-1]: + self.check_physical(line_number, line + '\n') + line_number += 1 + elif token[0] in (tokenize.NEWLINE, tokenize.NL): + self.check_physical(self.line_number, token[4]) + def check_all(self, expected=None, line_offset=0): """ Run all checks on the input file. |