diff options
author | Andi Albrecht <albrecht.andi@gmail.com> | 2019-05-01 11:55:31 +0200 |
---|---|---|
committer | Andi Albrecht <albrecht.andi@gmail.com> | 2019-05-01 11:55:31 +0200 |
commit | 93262c895b955ff50a1bc13536de3024d3eede50 (patch) | |
tree | 98a121b71942a0be9fd556f0d242018ae12d15f9 | |
parent | 913b56e34edc7e3025feea4744dbd762774805c3 (diff) | |
download | sqlparse-93262c895b955ff50a1bc13536de3024d3eede50.tar.gz |
Identify NULLS FIRST/LAST as keywords (fixes #487).
-rw-r--r-- | CHANGELOG | 1 | ||||
-rw-r--r-- | sqlparse/keywords.py | 1 | ||||
-rw-r--r-- | tests/test_tokenize.py | 7 |
3 files changed, 9 insertions, 0 deletions
@@ -9,6 +9,7 @@ Bug Fixes * Fix splitting of SQL with multiple statements inside parentheses (issue485, pr486 by win39). +* Correctly identify NULLS FIRST / NULLS LAST as keywords (issue487). Release 0.3.0 (Mar 11, 2019) diff --git a/sqlparse/keywords.py b/sqlparse/keywords.py index 761d14e..d929cbf 100644 --- a/sqlparse/keywords.py +++ b/sqlparse/keywords.py @@ -78,6 +78,7 @@ SQL_REGEX = { r'|(CROSS\s+|NATURAL\s+)?)?JOIN\b', tokens.Keyword), (r'END(\s+IF|\s+LOOP|\s+WHILE)?\b', tokens.Keyword), (r'NOT\s+NULL\b', tokens.Keyword), + (r'NULLS\s+(FIRST|LAST)\b', tokens.Keyword), (r'UNION\s+ALL\b', tokens.Keyword), (r'CREATE(\s+OR\s+REPLACE)?\b', tokens.Keyword.DDL), (r'DOUBLE\s+PRECISION\b', tokens.Name.Builtin), diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py index fcd1102..e65a651 100644 --- a/tests/test_tokenize.py +++ b/tests/test_tokenize.py @@ -171,6 +171,13 @@ def test_parse_endifloop(s): assert p.tokens[0].ttype is T.Keyword +@pytest.mark.parametrize('s', ['NULLS FIRST', 'NULLS LAST']) +def test_parse_nulls(s): # issue487 + p = sqlparse.parse(s)[0] + assert len(p.tokens) == 1 + assert p.tokens[0].ttype is T.Keyword + + @pytest.mark.parametrize('s', [ 'foo', 'Foo', |