summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author    Tim Hatch <tim@timhatch.com>  2016-05-31 22:06:40 -0700
committer Tim Hatch <tim@timhatch.com>  2016-05-31 22:06:40 -0700
commit    11f5df043d36049fb066e41178c31c2e64410c7b (patch)
tree      ec6db7986fc8a1c98094c218c46c273979dcc5ec
parent    d352bbdf4d2f6265d4ee934799775b044eb47678 (diff)
download  pygments-11f5df043d36049fb066e41178c31c2e64410c7b.tar.gz
Add a new lexer that assumes json object is already open.
Fixes #884
-rw-r--r--  pygments/lexers/_mapping.py |  1
-rw-r--r--  pygments/lexers/data.py     | 25
-rw-r--r--  tests/test_data.py          | 87
3 files changed, 111 insertions(+), 2 deletions(-)
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index 8243d344..46948d8d 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -212,6 +212,7 @@ LEXERS = {
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
+ 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)),
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
index 84d02f49..fbc25bf2 100644
--- a/pygments/lexers/data.py
+++ b/pygments/lexers/data.py
@@ -14,9 +14,9 @@ import re
from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
include, bygroups, inherit
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
- Punctuation, Literal
+ Punctuation, Literal, Error
-__all__ = ['YamlLexer', 'JsonLexer', 'JsonLdLexer']
+__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
class YamlLexerContext(LexerContext):
@@ -508,6 +508,27 @@ class JsonLexer(RegexLexer):
],
}
+
+class JsonBareObjectLexer(JsonLexer):
+ """
+ For JSON data structures (with missing object curly braces).
+
+ .. versionadded:: 2.2
+ """
+
+ name = 'JSONBareObject'
+ aliases = ['json-object']
+ filenames = []
+ mimetypes = ['application/json-object']
+
+ tokens = {
+ 'root': [
+ (r'\}', Error),
+ include('objectvalue'),
+ ],
+ }
+
+
class JsonLdLexer(JsonLexer):
"""
For `JSON-LD <http://json-ld.org/>`_ linked data.
diff --git a/tests/test_data.py b/tests/test_data.py
new file mode 100644
index 00000000..ea4c9be6
--- /dev/null
+++ b/tests/test_data.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+"""
+ Data Tests
+ ~~~~~~~~~~
+
+ :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import JsonLexer, JsonBareObjectLexer
+from pygments.token import Token
+
+
+class JsonTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = JsonLexer()
+
+ def testBasic(self):
+ fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
+ tokens = [
+ (Token.Punctuation, u'{'),
+ (Token.Name.Tag, u'"foo"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"bar"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Name.Tag, u'"foo2"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'['),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Punctuation, u']'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+class JsonBareObjectTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = JsonBareObjectLexer()
+
+ def testBasic(self):
+ # This is the same as testBasic for JsonLexer above, except the
+ # enclosing curly braces are removed.
+ fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
+ tokens = [
+ (Token.Name.Tag, u'"foo"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"bar"'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Name.Tag, u'"foo2"'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'['),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Punctuation, u']'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testClosingCurly(self):
+ # This can be an Error token, but should not be a can't-pop-from-stack
+ # exception.
+ fragment = '}"a"\n'
+ tokens = [
+ (Token.Error, '}'),
+ (Token.Name.Tag, '"a"'),
+ (Token.Text, '\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))