Diffstat (limited to 'pep8.py')
-rwxr-xr-x | pep8.py | 58
1 files changed, 44 insertions, 14 deletions
@@ -200,7 +200,7 @@ def missing_newline(physical_line):
         return len(physical_line), "W292 no newline at end of file"
 
 
-def maximum_line_length(physical_line, max_line_length):
+def maximum_line_length(physical_line, max_line_length, multiline):
     """
     Limit all lines to a maximum of 79 characters.
 
@@ -216,6 +216,10 @@ def maximum_line_length(physical_line, max_line_length):
     line = physical_line.rstrip()
     length = len(line)
     if length > max_line_length and not noqa(line):
+        # Sometimes, long lines in docstrings are hard to avoid -- like,
+        # a long URL that can't be wrapped because it has no whitespace.
+        if multiline and re.match(r'^\s*\S+$', line):
+            return
         if hasattr(line, 'decode'):   # Python 2
             # The line could contain multi-byte characters
             try:
@@ -1189,6 +1193,7 @@ class Checker(object):
         self._logical_checks = options.logical_checks
         self._ast_checks = options.ast_checks
         self.max_line_length = options.max_line_length
+        self.multiline = False  # in a multiline string?
         self.hang_closing = options.hang_closing
         self.verbose = options.verbose
         self.filename = filename
@@ -1237,16 +1242,9 @@ class Checker(object):
         self.line_number += 1
         if self.line_number > len(self.lines):
             return ''
-        return self.lines[self.line_number - 1]
-
-    def readline_check_physical(self):
-        """
-        Check and return the next physical line. This method can be
-        used to feed tokenize.generate_tokens.
-        """
-        line = self.readline()
-        if line:
-            self.check_physical(line)
+        line = self.lines[self.line_number - 1]
+        if self.indent_char is None and line[:1] in WHITESPACE:
+            self.indent_char = line[0]
         return line
 
     def run_check(self, check, argument_names):
@@ -1263,8 +1261,6 @@ class Checker(object):
         Run all physical checks on a raw input line.
         """
         self.physical_line = line
-        if self.indent_char is None and line[:1] in WHITESPACE:
-            self.indent_char = line[0]
         for name, check, argument_names in self._physical_checks:
             result = self.run_check(check, argument_names)
             if result is not None:
@@ -1352,13 +1348,47 @@ class Checker(object):
     def generate_tokens(self):
         if self._io_error:
             self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
-        tokengen = tokenize.generate_tokens(self.readline_check_physical)
+        tokengen = tokenize.generate_tokens(self.readline)
         try:
             for token in tokengen:
                 yield token
+                self.maybe_check_physical(token)
         except (SyntaxError, tokenize.TokenError):
             self.report_invalid_syntax()
 
+    def maybe_check_physical(self, token):
+        """
+        If appropriate (based on token), check current physical line(s).
+        """
+        # This is called after every token, but we only want to take action
+        # after a token that ends a line.
+        if token[0] in (tokenize.NEWLINE, tokenize.NL):
+            # Obviously, a newline token ends a single physical line.
+            self.check_physical(token[4])
+        elif token[0] == tokenize.STRING and token[1].count('\n'):
+            # Less obviously, a string that contains newlines is a
+            # multiline string, either triple-quoted or with internal
+            # newlines backslash-escaped. Check every physical line in the
+            # string *except* for the last one: its newline is outside of
+            # the multiline string, so we consider it a regular physical
+            # line, and will check it like any other physical line.
+            #
+            # Subtleties:
+            # - we don't *completely* ignore the last line; if it contains
+            #   the magical "# noqa" comment, we disable all physical
+            #   checks for the entire multiline string
+            # - have to wind self.line_number back because initially it
+            #   points to the last line of the string, and we want
+            #   check_physical() to give accurate feedback
+            if noqa(token[4]):
+                return
+            self.multiline = True
+            self.line_number = token[2][0]
+            for line in token[1].split('\n')[:-1]:
+                self.check_physical(line + '\n')
+                self.line_number += 1
+            self.multiline = False
+
     def check_all(self, expected=None, line_offset=0):
         """
         Run all checks on the input file.
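
To make the effect of the new `multiline` argument concrete, here is a minimal sketch (the file name and URL below are invented for illustration): a docstring line longer than 79 characters that consists of a single unbreakable token. With this patch, maximum_line_length() returns early for that line, because it is inside a multiline string (multiline=True) and matches r'^\s*\S+$', so E501 is no longer reported for it. A long docstring line that contains whitespace is still flagged.

# long_url_example.py -- hypothetical module, only used to illustrate the patch.

def fetch_spec():
    """Download the protocol specification.

    The next physical line exceeds 79 characters but is a single token,
    so the patched maximum_line_length() skips it and reports no E501:

    http://example.com/a/very/long/path/segment/that/cannot/be/wrapped/because/it/contains/no/whitespace

    A long line *with* spaces in a docstring is still reported as E501.
    """

Running `pep8 long_url_example.py` before this change should report E501 for the URL line; with the change applied it should pass cleanly, assuming the default maximum line length.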
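The rewind-and-replay logic in maybe_check_physical() relies on how tokenize reports a multiline string: the whole literal arrives as one STRING token whose text still contains the embedded newlines, and whose start position gives the 1-based row of the opening quotes. A small standalone sketch (not part of pep8.py; the sample source is made up) showing the pieces the new method uses:

import io
import tokenize

# A two-line triple-quoted string; tokenize yields it as a single token.
SOURCE = 'DOC = """first line\nsecond line\n"""\n'

for tok in tokenize.generate_tokens(io.StringIO(SOURCE).readline):
    if tok[0] == tokenize.STRING and tok[1].count('\n'):
        start_row = tok[2][0]   # 1-based line of the opening quotes
        # Replay every physical line of the string except the last one,
        # mirroring how maybe_check_physical() feeds them to check_physical().
        for offset, line in enumerate(tok[1].split('\n')[:-1]):
            print(start_row + offset, repr(line + '\n'))

This is why the method sets self.line_number back to token[2][0] before replaying: the tokenizer has already advanced the reader to the last line of the string, and the physical checks need accurate line numbers for their reports.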