diff options
author | Jakub Roztocil <jakub@roztocil.co> | 2019-12-02 20:00:38 +0100 |
---|---|---|
committer | Jakub Roztocil <jakub@roztocil.co> | 2019-12-02 20:00:38 +0100 |
commit | 8a89c7ba40753b8bca41bb1e6ad2af64688f4d27 (patch) | |
tree | 688ee1e53b4a5ae2be4bc46550129850cd03b233 | |
parent | 411a7b47413e5a1658ff1f5d1aa2f55ff3443f22 (diff) | |
download | pygments-git-8a89c7ba40753b8bca41bb1e6ad2af64688f4d27.tar.gz |
Parse the HTTP status-line even when the reason-phrase is not included.
Background: https://github.com/jakubroztocil/httpie/issues/811
-rw-r--r-- | pygments/lexers/textfmts.py | 4 | ||||
-rw-r--r-- | tests/test_textfmts.py | 28 |
2 files changed, 30 insertions, 2 deletions
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py index cd5d4d2a..0faf66ed 100644 --- a/pygments/lexers/textfmts.py +++ b/pygments/lexers/textfmts.py @@ -179,9 +179,9 @@ class HttpLexer(RegexLexer): bygroups(Name.Function, Text, Name.Namespace, Text, Keyword.Reserved, Operator, Number, Text), 'headers'), - (r'(HTTP)(/)(1\.[01]|2|3)( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)', + (r'(HTTP)(/)(1\.[01]|2|3)( +)(\d{3})(( +)([^\r\n]+))?(\r?\n|\Z)', bygroups(Keyword.Reserved, Operator, Number, Text, Number, - Text, Name.Exception, Text), + None, Text, Name.Exception, Text), 'headers'), ], 'headers': [ diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py index 5f369007..6d382e8a 100644 --- a/tests/test_textfmts.py +++ b/tests/test_textfmts.py @@ -18,6 +18,34 @@ def lexer(): yield HttpLexer() +def test_http_status_line(lexer): + fragment = u'HTTP/1.1 200 OK\n' + tokens = [ + (Token.Keyword.Reserved, u'HTTP'), + (Token.Operator, u'/'), + (Token.Number, u'1.1'), + (Token.Text, u' '), + (Token.Number, u'200'), + (Token.Text, u' '), + (Token.Name.Exception, u'OK'), + (Token.Text, u'\n'), + ] + assert list(lexer.get_tokens(fragment)) == tokens + + +def test_http_status_line_without_reason_phrase(lexer): + fragment = u'HTTP/1.1 200\n' + tokens = [ + (Token.Keyword.Reserved, u'HTTP'), + (Token.Operator, u'/'), + (Token.Number, u'1.1'), + (Token.Text, u' '), + (Token.Number, u'200'), + (Token.Text, u'\n'), + ] + assert list(lexer.get_tokens(fragment)) == tokens + + def test_application_xml(lexer): fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n' tokens = [ |