diff options
author | Georg Brandl <georg@python.org> | 2020-02-29 15:45:08 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2020-02-29 15:45:08 +0100 |
commit | 35544e2fc6eed0ce4a27ec7285aac71ff0ddc473 (patch) | |
tree | 4390507bf0d4d5a4596cfc57c575da12f9da40f9 /pygments/lexers/robotframework.py | |
parent | 14fc057d300102d88a07eda5558f238d49dd23f6 (diff) | |
download | pygments-git-35544e2fc6eed0ce4a27ec7285aac71ff0ddc473.tar.gz |
Remove Python 2 compatibility (#1348)
* Remove Python 2 compatibility
* remove 2/3 shims in pygments.util
* update setup.py metadata
* Remove unneeded object inheritance.
* Remove unneeded future imports.
Diffstat (limited to 'pygments/lexers/robotframework.py')
-rw-r--r-- | pygments/lexers/robotframework.py | 15 |
1 file changed, 7 insertions, 8 deletions
diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py index 642c90c5..ddaddb22 100644 --- a/pygments/lexers/robotframework.py +++ b/pygments/lexers/robotframework.py @@ -27,7 +27,6 @@ import re from pygments.lexer import Lexer from pygments.token import Token -from pygments.util import text_type __all__ = ['RobotFrameworkLexer'] @@ -80,11 +79,11 @@ class RobotFrameworkLexer(Lexer): for value, token in row_tokenizer.tokenize(row): for value, token in var_tokenizer.tokenize(value, token): if value: - yield index, token, text_type(value) + yield index, token, str(value) index += len(value) -class VariableTokenizer(object): +class VariableTokenizer: def tokenize(self, string, token): var = VariableSplitter(string, identifiers='$@%&') @@ -111,7 +110,7 @@ class VariableTokenizer(object): yield value, token -class RowTokenizer(object): +class RowTokenizer: def __init__(self): self._table = UnknownTable() @@ -159,7 +158,7 @@ class RowTokenizer(object): yield value, token -class RowSplitter(object): +class RowSplitter: _space_splitter = re.compile('( {2,})') _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))') @@ -185,7 +184,7 @@ class RowSplitter(object): yield rest -class Tokenizer(object): +class Tokenizer: _tokens = None def __init__(self): @@ -292,7 +291,7 @@ class KeywordCall(Tokenizer): return GherkinTokenizer().tokenize(value, KEYWORD) -class GherkinTokenizer(object): +class GherkinTokenizer: _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE) def tokenize(self, value, token): @@ -320,7 +319,7 @@ class ForLoop(Tokenizer): return token -class _Table(object): +class _Table: _tokenizer_class = None def __init__(self, prev_tokenizer=None): |