author    Ulf Wiger <ulf@feuerlabs.com>  2015-10-06 12:56:11 +0200
committer Ulf Wiger <ulf@feuerlabs.com>  2015-11-20 13:43:07 -0800
commit    34aa86b5a2e97650fe6299ccf794d5eb5d052d91 (patch)
tree      edfb4bb844c3b90565e7a0bb00f678703d084188 /deps
parent    e6299ff287e767dae71fb47009f9bf4620cc3d78 (diff)
download  rvi_core-34aa86b5a2e97650fe6299ccf794d5eb5d052d91.tar.gz
w.i.p. transition to jsx json codec
Diffstat (limited to 'deps')
-rw-r--r--  deps/base64url/.gitignore                        |    7
-rw-r--r--  deps/exec/.gitignore                             |    7
-rw-r--r--  deps/exo/src/exo_http_server.erl                 |   55
-rw-r--r--  deps/exo/src/exo_url.erl                         |   13
-rw-r--r--  deps/jsx/.gitignore                              |   10
-rw-r--r--  deps/jsx/.travis.yml                             |    8
-rw-r--r--  deps/jsx/CHANGES.md                              |  194
-rw-r--r--  deps/jsx/LICENSE                                 |   21
-rw-r--r--  deps/jsx/Makefile                                |   19
-rw-r--r--  deps/jsx/README.md                               |  728
-rw-r--r--  deps/jsx/mix.exs                                 |   37
-rw-r--r--  deps/jsx/mix.lock                                |    1
-rw-r--r--  deps/jsx/rebar.config                            |    5
-rw-r--r--  deps/jsx/rebar.config.script                     |   11
-rw-r--r--  deps/jsx/rebar.lock                              |    1
-rw-r--r--  deps/jsx/src/jsx.app.src                         |   21
-rw-r--r--  deps/jsx/src/jsx.erl                             |  527
-rw-r--r--  deps/jsx/src/jsx_config.erl                      |  337
-rw-r--r--  deps/jsx/src/jsx_config.hrl                      |   17
-rw-r--r--  deps/jsx/src/jsx_consult.erl                     |   99
-rw-r--r--  deps/jsx/src/jsx_decoder.erl                     | 1976
-rw-r--r--  deps/jsx/src/jsx_encoder.erl                     |  114
-rw-r--r--  deps/jsx/src/jsx_parser.erl                      | 1168
-rw-r--r--  deps/jsx/src/jsx_to_json.erl                     |  399
-rw-r--r--  deps/jsx/src/jsx_to_term.erl                     |  455
-rw-r--r--  deps/jsx/src/jsx_verify.erl                      |  119
-rw-r--r--  deps/msgpack/.gitignore                          |    6
-rw-r--r--  deps/msgpack/.travis.yml                         |   18
-rw-r--r--  deps/msgpack/AUTHORS                             |    7
-rw-r--r--  deps/msgpack/LICENSE-2.0.txt                     |  202
-rw-r--r--  deps/msgpack/Makefile                            |   61
-rw-r--r--  deps/msgpack/README.md                           |  127
-rw-r--r--  deps/msgpack/dialyzer.ignore-warnings            |    0
-rw-r--r--  deps/msgpack/include/msgpack.hrl                 |  109
-rwxr-xr-x  deps/msgpack/rebar                               |  bin 0 -> 151989 bytes
-rw-r--r--  deps/msgpack/rebar.config                        |   26
-rw-r--r--  deps/msgpack/rebar.config.script                 |   25
-rw-r--r--  deps/msgpack/src/msgpack.app.src                 |    9
-rw-r--r--  deps/msgpack/src/msgpack.erl                     |  256
-rw-r--r--  deps/msgpack/src/msgpack_ext.erl                 |   30
-rw-r--r--  deps/msgpack/src/msgpack_packer.erl              |  394
-rw-r--r--  deps/msgpack/src/msgpack_term.erl                |   82
-rw-r--r--  deps/msgpack/src/msgpack_unpacker.erl            |  257
-rw-r--r--  deps/msgpack/test/msgpack_ext_example_tests.erl  |  102
-rw-r--r--  deps/msgpack/test/msgpack_nif_tests.erl          |  140
-rw-r--r--  deps/msgpack/test/msgpack_test.erl               |  413
-rw-r--r--  deps/msgpack/test/utf8.txt                       |    1
-rw-r--r--  deps/setup/.gitignore                            |   10
48 files changed, 8592 insertions(+), 32 deletions(-)
diff --git a/deps/base64url/.gitignore b/deps/base64url/.gitignore
new file mode 100644
index 0000000..bdd3840
--- /dev/null
+++ b/deps/base64url/.gitignore
@@ -0,0 +1,7 @@
+ebin
+deps
+.eunit
+.ct
+logs
+test/*.beam
+erl_crash.dump
diff --git a/deps/exec/.gitignore b/deps/exec/.gitignore
new file mode 100644
index 0000000..b22551b
--- /dev/null
+++ b/deps/exec/.gitignore
@@ -0,0 +1,7 @@
+*.beam
+*.o
+*.swp
+*.tgz
+/priv
+/ebin
+.rebar/*
diff --git a/deps/exo/src/exo_http_server.erl b/deps/exo/src/exo_http_server.erl
index 2d08f8e..ae19faa 100644
--- a/deps/exo/src/exo_http_server.erl
+++ b/deps/exo/src/exo_http_server.erl
@@ -20,9 +20,9 @@
-behaviour(exo_socket_server).
%% exo_socket_server callbacks
--export([init/2,
- data/3,
- close/2,
+-export([init/2,
+ data/3,
+ close/2,
error/3]).
-export([control/4]).
@@ -50,13 +50,13 @@
%%-----------------------------------------------------------------------------
%% @doc
%% Starts a socket server on port Port with server options ServerOpts
-%% that are sent to the server when a connection is established,
+%% that are sent to the server when a connection is established,
%% i.e init is called.
%%
%% @end
%%-----------------------------------------------------------------------------
--spec start(Port::integer(),
- ServerOptions::list({Option::atom(), Value::term()})) ->
+-spec start(Port::integer(),
+ ServerOptions::list({Option::atom(), Value::term()})) ->
{ok, ChildPid::pid()} |
{error, Reason::term()}.
@@ -74,13 +74,13 @@ start(Port, ServerOptions) ->
%%-----------------------------------------------------------------------------
%% @doc
%% Starts and links a socket server on port Port with server options ServerOpts
-%% that are sent to the server when a connection is established,
+%% that are sent to the server when a connection is established,
%% i.e init is called.
%%
%% @end
%%-----------------------------------------------------------------------------
--spec start_link(Port::integer(),
- ServerOptions::list({Option::atom(), Value::term()})) ->
+-spec start_link(Port::integer(),
+ ServerOptions::list({Option::atom(), Value::term()})) ->
{ok, ChildPid::pid()} |
{error, Reason::term()}.
@@ -101,8 +101,8 @@ start_link(Port, ServerOptions) ->
%%
%% @end
%%-----------------------------------------------------------------------------
--spec init(Socket::#exo_socket{},
- ServerOptions::list({Option::atom(), Value::term()})) ->
+-spec init(Socket::#exo_socket{},
+ ServerOptions::list({Option::atom(), Value::term()})) ->
{ok, State::#state{}}.
init(Socket, Options) ->
@@ -111,7 +111,7 @@ init(Socket, Options) ->
[_IP, _Port, Options]),
Access = proplists:get_value(access, Options, []),
Module = proplists:get_value(request_handler, Options, undefined),
- {ok, #state{ access = Access, request_handler = Module}}.
+ {ok, #state{ access = Access, request_handler = Module}}.
%% To avoid a compiler warning. Should we actually support something here?
@@ -124,9 +124,9 @@ control(_Socket, _Request, _From, State) ->
%%
%% @end
%%-----------------------------------------------------------------------------
--spec data(Socket::#exo_socket{},
+-spec data(Socket::#exo_socket{},
Data::term(),
- State::#state{}) ->
+ State::#state{}) ->
{ok, NewState::#state{}} |
{stop, {error, Reason::term()}, NewState::#state{}}.
@@ -142,7 +142,7 @@ data(Socket, Data, State) ->
Error ->
{stop, Error, State}
end;
- {http_error, ?CRNL} ->
+ {http_error, ?CRNL} ->
{ok, State};
{http_error, ?NL} ->
{ok, State};
@@ -159,8 +159,8 @@ data(Socket, Data, State) ->
%%
%% @end
%%-----------------------------------------------------------------------------
--spec close(Socket::#exo_socket{},
- State::#state{}) ->
+-spec close(Socket::#exo_socket{},
+ State::#state{}) ->
{ok, NewState::#state{}}.
close(_Socket, State) ->
@@ -176,16 +176,16 @@ close(_Socket, State) ->
%%-----------------------------------------------------------------------------
-spec error(Socket::#exo_socket{},
Error::term(),
- State::#state{}) ->
+ State::#state{}) ->
{stop, {error, Reason::term()}, NewState::#state{}}.
error(_Socket,Error,State) ->
?debug("exo_http_serber: error = ~p\n", [Error]),
- {stop, Error, State}.
+ {stop, Error, State}.
handle_request(Socket, R, State) ->
- ?debug("exo_http_server: request = ~s\n",
+ ?debug("exo_http_server: request = ~s\n",
[[exo_http:format_request(R),?CRNL,
exo_http:format_hdr(R#http_request.headers),
?CRNL]]),
@@ -197,11 +197,11 @@ handle_request(Socket, R, State) ->
Error ->
{stop, Error, State}
end.
-
-handle_body(Socket, Request, Body,
+
+handle_body(Socket, Request, Body,
State=#state {request_handler = RH}) when is_tuple(RH) ->
{M, F, As} = request_handler(RH, Socket, Request, Body),
- ?debug("exo_http_server: calling ~p with -BODY:\n~s\n-END-BODY\n",
+ ?debug("exo_http_server: calling ~p with -BODY:\n~s\n-END-BODY\n",
[RH, Body]),
case apply(M, F, As) of
ok -> {ok, State};
@@ -220,7 +220,7 @@ handle_body(Socket, Request, Body, State) ->
response(Socket, undefined, 200, "OK", "OK"),
{ok, State};
true ->
- response(Socket, undefined, 404, "Not Found",
+ response(Socket, undefined, 404, "Not Found",
"Object not found"),
{ok, State}
end.
@@ -238,11 +238,11 @@ request_handler({Module, Function, XArgs}, Socket, Request, Body) ->
%%
%% @end
%%-----------------------------------------------------------------------------
--spec response(Socket::#exo_socket{},
+-spec response(Socket::#exo_socket{},
Connection::string() | undefined,
Status::integer(),
Phrase::string(),
- Status::string()) ->
+ Status::string()) ->
ok |
{error, Reason::term()}.
@@ -263,7 +263,7 @@ response(S, Connection, Status, Phrase, Body, Opts) ->
exo_http:format_hdr(H),
?CRNL,
Body],
- ?debug("exo_http_server: response:\n~s\n", [Response]),
+ ?debug("exo_http_server: response:\n~s\n", [iolist_to_binary(Response)]),
exo_socket:send(S, Response).
content_length(B) when is_binary(B) ->
@@ -288,4 +288,3 @@ test() ->
{keyfile, filename:join(Dir, "host.key")},
{certfile, filename:join(Dir, "host.cert")}],
?MODULE, []).
-
diff --git a/deps/exo/src/exo_url.erl b/deps/exo/src/exo_url.erl
index e904baa..6f8df60 100644
--- a/deps/exo/src/exo_url.erl
+++ b/deps/exo/src/exo_url.erl
@@ -20,12 +20,12 @@
-export([parse/1, parse/2, format/1, format_path/1]).
-export([parse_path/2]).
-%% returns a #url{}
+%% returns a #url{}
parse(Str) ->
parse(Str, strict).
parse(Str, Strict) ->
- case Str of
+ case str(Str) of
"http://" ++ Rest ->
parse_host(Strict, #url{scheme = http}, Rest, []);
"HTTP://" ++ Rest ->
@@ -42,10 +42,15 @@ parse(Str, Strict) ->
parse_host(Strict, #url{scheme = http}, Str, [])
end.
+str(S) when is_binary(S) ->
+ binary_to_list(S);
+str(S) when is_list(S) ->
+ S.
+
parse_host(Strict, U, Str, Ack) ->
case Str of
[] ->
- U#url{host = lists:reverse(Ack),
+ U#url{host = lists:reverse(Ack),
path = "/"
};
[$/|Tail] ->
@@ -83,7 +88,7 @@ parse_path(Strict, U, Str, Ack) ->
parse_path(Strict, U, T, [H|Ack])
end.
-
+
format(Url) when is_record(Url, url) ->
if Url#url.scheme == undefined ->
format_path(Url);
diff --git a/deps/jsx/.gitignore b/deps/jsx/.gitignore
new file mode 100644
index 0000000..4a8eee6
--- /dev/null
+++ b/deps/jsx/.gitignore
@@ -0,0 +1,10 @@
+.eunit
+deps
+ebin
+*.o
+*.beam
+*.plt
+erl_crash.dump
+.DS_Store
+doc
+.rebar
diff --git a/deps/jsx/.travis.yml b/deps/jsx/.travis.yml
new file mode 100644
index 0000000..cda5d0a
--- /dev/null
+++ b/deps/jsx/.travis.yml
@@ -0,0 +1,8 @@
+language: erlang
+script: rebar compile && rebar skip_deps=true eunit
+otp_release:
+ - 17.5
+ - 18.0
+ - R16B03-1
+ - R15B02
+
diff --git a/deps/jsx/CHANGES.md b/deps/jsx/CHANGES.md
new file mode 100644
index 0000000..880dcda
--- /dev/null
+++ b/deps/jsx/CHANGES.md
@@ -0,0 +1,194 @@
+vNext
+
+* support for milliseconds in datetimes
+
+v2.7.0
+
+* `return_tail` option
+* fixes for edoc generation
+
+v2.6.2
+
+* ensure maps are always enabled when compiling via mix
+
+v2.6.1
+
+* hex.pm maintenance release
+
+v2.6.0
+
+* equivalent to v2.5.3 but created for semver reasons
+
+v2.5.3
+
+* add a `mix.exs` to be buildable by both mix and rebar
+* minor README updates
+
+v2.5.2
+
+* fix regression parsing <<"-0e...">> (thanks @c-bik)
+
+v2.5.1
+
+* assume all datetimes are UTC time and add `Z` designator to indicate
+* fix parsing issue with datetimes in arrays
+
+v2.5.0
+
+* `consult/2` function for reading a file directly to a json term
+* `maps_always` build flag for always returning maps on platforms
+ that support them
+* dialyzer fixes
+
+v2.4.0
+
+* enough performance improvements to justify a new version. 2-3x
+ speedup depending on mode of operation
+
+v2.3.1
+
+* fixes an issue where astral plane json escape sequences were
+ inadvertently being converted to the unicode replacement
+ character
+
+v2.3
+
+* switched to a faster implementation of string parsing in both
+ the decoder and encoder
+* expand `uescape` option to the decoder
+* allow control codes in json passed to decoder (contrary to the spec,
+ yes)
+
+v2.2
+
+* `return_maps` option
+* `uescape` option for 7-bit clean output
+* add `Makefile` for slightly better `erlang.mk` compatibility
+* add `maps_support/0` call to determine whether `jsx` was compiled
+ with support for maps or not
+
+v2.1.1
+
+* faster generation of json via iolists
+* `repeat_keys` option
+
+v2.1
+
+* force the end of streams with `end_json` in addition to `end_stream`
+* support for encoding erlang datetime tuples to iso8601 format
+* allow a single trailing comma in objects and arrays
+
+v2.0.4
+
+* more typespec adjustments
+
+v2.0.3
+
+* update some typespecs to make them more comprehensive
+
+v2.0.2
+
+* fixes travis-ci spec
+
+v2.0.1
+
+* fix regression in output of empty objects/arrays
+
+v2.0
+
+* jsx is much more pragmatic by default; common json errors are silently
+ ignored (and fixed). stricter parsing must be enabled with options
+* add support for encoding otp 17.0's new maps data type
+* removed `pre_encode` and `post_decode` options in favour of making jsx
+ functions easier to wrap and customize
+* streaming behavior is now disabled by default and must be requested explicitly
+* removed deprecated function names (`to_json`, `to_term`, `term_to_json`, etc)
+* expanded test coverage
+
+
+v1.4.5
+
+* various fixes to typespecs uncovered by dialyzer
+* allow integer keys during encoding
+* convert atoms (other than `true`, `false` and `null`) to strings during encoding
+
+v1.4.4
+
+* typespec for `json_term/0` fixed
+* incorrect boolean shortcircuiting fixed in multibyte escape processing
+
+v1.4.3
+
+* add empty rebar.config for mix build tool
+* add `attempt_atom` option for decoding json objects
+* fix a bug related to multibyte codepoints and streaming input
+* add a missing error state in the encoder
+
+v1.4.2
+
+* build apparatus cleaned up and streamlined
+* new `{raw, <<"json goes here">>}` intermediate form to support direct generation of json
+* bugfixes involving inappropriate exceptions from jsx functions
+
+v1.4.1
+
+* fixes a bug with interaction between `dirty_strings` and even numbers of escape characters
+* performance enhancements
+
+v1.4
+
+* radically refactored decoder
+* `dirty_strings` now behaves intuitively in decoding. bad codepoints, bad utf8, illegal characters and escapes (except `"` and `'` if `single_quoted_strings` is enabled) are ignored completely
+* `incomplete_handler` & `error_handler` are now available for use, see documentation in README
+
+v1.3.3
+
+* `pre_encode` now orders input in the order you'd expect
+
+v1.3.2
+
+* `pre_encode` is now able to handle tuples *correctly*
+
+v1.3.1
+
+* `pre_encode` is now able to handle tuples
+
+v1.3
+
+* introduces `prettify/1` and `minify/1`, shortcuts for `format/2`
+* introduce `encode/1,2` and `decode/1,2` as primary interface to built in tokenizers. `to_json/1,2` and `to_term/1,2` remain accessible but not advertised
+* new `parser/3` function exposes syntactic analysis stage for use with user defined tokenizers
+* improved documentation
+
+v1.2.1
+
+* fixes incorrect handling of escaped forward slashes, thanks bob ippolito
+
+v1.2
+
+* rewritten handling of string escaping to improve performance
+* `pre_encode` and `post_decode` hooks, see README
+* `relax` option
+
+v1.1.2
+
+* add `dirty_strings` option
+* more fixes for invalid unicode in strings
+
+v1.1.1
+
+* fixes bug regarding handling of invalid unicode in R14Bxx
+
+v1.1
+
+* improvements to string escaping and json generation performance
+
+v1.0.2
+
+* fixes to function specs
+* rewritten README
+* `comments` option
+
+v1.0.1
+
+* rebar fix
diff --git a/deps/jsx/LICENSE b/deps/jsx/LICENSE
new file mode 100644
index 0000000..de1b470
--- /dev/null
+++ b/deps/jsx/LICENSE
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/jsx/Makefile b/deps/jsx/Makefile
new file mode 100644
index 0000000..266608d
--- /dev/null
+++ b/deps/jsx/Makefile
@@ -0,0 +1,19 @@
+all: deps
+
+deps:
+ rebar get-deps
+ rebar compile
+
+app:
+ rebar compile
+
+tests:
+ rebar eunit
+
+clean:
+ rebar clean
+
+distclean: clean
+ rebar delete-deps
+
+.PHONY: all deps app tests clean distclean
\ No newline at end of file
diff --git a/deps/jsx/README.md b/deps/jsx/README.md
new file mode 100644
index 0000000..59300a6
--- /dev/null
+++ b/deps/jsx/README.md
@@ -0,0 +1,728 @@
+# jsx (v2.7.0) #
+
+
+an erlang application for consuming, producing and manipulating [json][json].
+inspired by [yajl][yajl]
+
+**jsx** is built via [rebar][rebar] and continuous integration testing provided courtesy [travis-ci][travis]
+
+current status: [![Build Status](https://secure.travis-ci.org/talentdeficit/jsx.png?branch=develop)](http://travis-ci.org/talentdeficit/jsx)
+
+**jsx** is released under the terms of the [MIT][MIT] license
+
+copyright 2010-2015 alisdair sullivan
+
+## really important note ##
+
+there are a few changes for users upgrading from 1.x. see [CHANGES.md](CHANGES.md)
+for the overview or [migrating from 1.x](#migrating) for the details
+
+
+## index ##
+
+* [quickstart](#quickstart)
+* [description](#description)
+ - [migrating from 1.x](#migrating)
+ - [json <-> erlang mapping](#json---erlang-mapping)
+ - [incomplete input](#incomplete-input)
+* [data types](#data-types)
+ - [`json_term()`](#json_term)
+ - [`json_text()`](#json_text)
+ - [`event()`](#event)
+ - [`option()`](#option)
+* [exports](#exports)
+ - [`encoder/3`, `decoder/3` & `parser/3`](#encoder3-decoder3--parser3)
+ - [`decode/1,2`](#decode12)
+ - [`encode/1,2`](#encode12)
+ - [`format/1,2`](#format12)
+ - [`minify/1`](#minify1)
+ - [`prettify/1`](#prettify1)
+ - [`is_json/1,2`](#is_json12)
+ - [`is_term/1,2`](#is_term12)
+ - [`maps_support/0`](#maps_support0)
+* [callback exports](#callback-exports)
+ - [`Module:init/1`](#moduleinit1)
+ - [`Module:handle_event/2`](#modulehandle_event2)
+* [acknowledgements](#acknowledgements)
+
+
+## quickstart ##
+
+#### to build the library and run tests ####
+
+```bash
+$ rebar compile
+$ rebar eunit
+```
+
+#### to convert a utf8 binary containing a json string into an erlang term ####
+
+```erlang
+1> jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>).
+[{<<"library">>,<<"jsx">>},{<<"awesome">>,true}]
+2> jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>, [return_maps]).
+#{<<"awesome">> => true,<<"library">> => <<"jsx">>}
+3> jsx:decode(<<"[\"a\",\"list\",\"of\",\"words\"]">>).
+[<<"a">>, <<"list">>, <<"of">>, <<"words">>]
+```
+
+#### to convert an erlang term into a utf8 binary containing a json string ####
+
+```erlang
+1> jsx:encode([{<<"library">>,<<"jsx">>},{<<"awesome">>,true}]).
+<<"{\"library\": \"jsx\", \"awesome\": true}">>
+2> jsx:encode(#{<<"library">> => <<"jsx">>, <<"awesome">> => true}).
+<<"{\"awesome\":true,\"library\":\"jsx\"}">>
+3> jsx:encode([<<"a">>, <<"list">>, <<"of">>, <<"words">>]).
+<<"[\"a\",\"list\",\"of\",\"words\"]">>
+```
+
+#### to check if a binary or a term is valid json ####
+
+```erlang
+1> jsx:is_json(<<"[\"this is json\"]">>).
+true
+2> jsx:is_json("[\"this is not\"]").
+false
+3> jsx:is_term([<<"this is a term">>]).
+true
+4> jsx:is_term([this, is, not]).
+false
+```
+
+#### to minify some json ####
+
+```erlang
+1> jsx:minify(<<"{
+ \"a list\": [
+ 1,
+ 2,
+ 3
+ ]
+}">>).
+<<"{\"a list\":[1,2,3]}">>
+```
+
+#### to prettify some json ####
+
+```erlang
+1> jsx:prettify(<<"{\"a list\":[1,2,3]}">>).
+<<"{
+ \"a list\": [
+ 1,
+ 2,
+ 3
+ ]
+}">>
+```
+
+
+## description ##
+
+
+**jsx** is an erlang application for consuming, producing and manipulating
+[json][json]
+
+**jsx** follows the json [spec][rfc4627] as closely as possible with allowances for
+real world usage
+
+**jsx** is pragmatic. the json spec allows extensions so **jsx** extends the spec in a
+number of ways. see the section on `strict` in [options](#option) below though
+
+json has no official comments but this parser allows c/c++ style comments.
+anywhere whitespace is allowed you can insert comments (both `// ...` and `/* ... */`)
+
+some particularly irresponsible json emitters leave trailing commas at the end of
+objects or arrays. **jsx** allows a single trailing comma in input. multiple commas
+in any position or a preceding comma are still errors
+
+all **jsx** decoder input should be `utf8` encoded binaries. sometimes you get binaries
+that are almost but not quite valid utf8 whether due to improper escaping or poor
+encoding. **jsx** replaces invalid codepoints and poorly formed sequences with the
+unicode replacement character (`u+FFFD`) but does its best to return something
+comprehensible
+
+json only allows keys and strings to be delimited by double quotes (`u+0022`) but
+javascript allows them to be delimited by single quotes (`u+0027`) as well. **jsx**
+follows javascript in this. strings that start with single quotes can contain double
+quotes but must end with single quotes and must escape any single quotes they contain
+
+json and **jsx** only recognize escape sequences as outlined in the json spec. bad
+escape sequences are simply ignored and left in strings unaltered
+
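+a quick illustration of these relaxations in the default (non `strict`) mode. the
+shell output here is assumed from the descriptions above rather than taken from a
+real session
+
+```erlang
+1> jsx:decode(<<"[1, /* a comment */ 2, 3,]">>).
+[1,2,3]
+2> jsx:decode(<<"{'single': 'quotes'}">>).
+[{<<"single">>,<<"quotes">>}]
+```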
+
+### migrating from 1.x ###
+
+if you're migrating from jsx v1.x to v2.x in most cases you won't need to
+make any changes to your code
+
+support for encoding otp 17.0's new map type is now enabled by default when compiling
+via rebar for any release that supports them. jsx should still compile cleanly for
+earlier releases without any user intervention. if you'd like to disable maps you can
+either set the env variable `JSX_NOMAPS` or by uncommenting the applicable tuple in
+`rebar.config`
+
+if you used any of `replaced_bad_utf8`, `single_quoted_strings`, `comments`,
+`ignored_bad_escapes` or `relax` you can simply omit them from your calls to jsx,
+they are all enabled by default now. if you want stricter parsing see the new
+[`strict` options](#option) available
+
+if you were using jsx's streaming features to parse partial json, note that streaming
+is now disabled by default. you'll need to pass the `stream` option to calls to jsx
+functions to reenable it
+
+support for `pre_encode` and `post_decode` has been removed. they were fragile and hard
+to understand and they prevented evolution of the encoding and decoding code
+
+
+### json &lt;-> erlang mapping ###
+
+**json** | **erlang**
+--------------------------------|--------------------------------
+`number` | `integer()` and `float()`
+`string` | `binary()` and `atom()`
+`true`, `false` and `null` | `true`, `false` and `null`
+`array` | `[]` and `[JSON]`
+`object` | `#{}`, `[{}]` and `[{binary() OR atom() OR integer(), JSON}]`
+see below | `datetime()`
+
+* numbers
+
+ javascript and thus json represent all numeric values with floats. there's no
+ reason for erlang -- a language that supports arbitrarily large integers -- to
+ restrict all numbers to the ieee754 range
+
+ whenever possible, **jsx** will interpret json numbers that look like integers as
+ integers. other numbers will be converted to erlang's floating point type, which
+ is nearly but not quite ieee754. negative zero is not representable in erlang (zero
+ is unsigned in erlang and `0` is equivalent to `-0`) and will be interpreted as
+ regular zero. numbers not representable are beyond the concern of this implementation,
+ and will result in parsing errors
+
+ when converting from erlang to json, floats are represented with their
+ shortest representation that will round trip without loss of precision. this
+ means that some floats may be superficially dissimilar (although
+ functionally equivalent). for example, `1.0000000000000001` will be
+ represented by `1.0`
+
+* strings
+
+ json strings must be unicode encoded binaries or erlang atoms. in practice,
+ because **jsx** only accepts `utf8` binaries all binary strings must be `utf8`.
+ in addition to being unicode json strings restrict a number of codepoints and
+ define a number of escape sequences
+
+ json string escapes of the form `\uXXXX` will be converted to their
+ equivalent codepoints during parsing. this means control characters and
+ other codepoints disallowed by the json spec may be encountered in resulting
+ strings. the utf8 restriction means the surrogates are explicitly disallowed.
+ if a string contains escaped surrogates (`u+d800` to `u+dfff`) they are
+ interpreted but only when they form valid surrogate pairs. surrogates
+ encountered otherwise are replaced with the replacement codepoint (`u+fffd`)
+
+ all erlang strings are represented by **valid** `utf8` encoded binaries. the
+ encoder will check strings for conformance. badly formed `utf8` sequences may
+ be replaced with the replacement codepoint (`u+fffd`) according to the unicode
+ spec
+
+ this implementation performs no normalization on strings beyond that
+ detailed here. be careful when comparing strings as equivalent strings
+ may have different `utf8` encodings
+
+* true, false and null
+
+ the json primitives `true`, `false` and `null` are represented by the
+ erlang atoms `true`, `false` and `null`. surprise
+
+* arrays
+
+ json arrays are represented with erlang lists of json values as described
+ in this section
+
+* objects
+
+ json objects are represented by erlang proplists. erlang maps may also be
+ encoded to json, and json objects can optionally be decoded to maps (via the
+ `return_maps` option)
+
+ the empty object has the special representation `[{}]` to differentiate it
+ from the empty list. ambiguities like `[true, false]` prevent the use of
+ the shorthand form of property lists using atoms as properties so all
+ properties must be tuples. all keys must be encoded as in `string` or as
+ atoms or integers (which will be escaped and converted to binaries for
+ presentation to handlers). values should be valid json values. repeated
+ keys are tolerated in json text decoded to erlang terms but are not allowed
+ in erlang terms encoded to json
+
+* datetime
+
+ erlang datetime tuples (`{{Year, Month, Day}, {Hour, Min, Sec}}`) as returned
+ from `erlang:localtime/0` are automatically encoded as [iso8601][iso8601]
+ strings and are assumed to be UTC time. no conversion of [iso8601][iso8601]
+ strings is attempted when decoding json (see the example just below)
+
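+a short shell session illustrating the float and datetime notes above. the exact
+output shown is assumed from the descriptions in this section
+
+```erlang
+1> jsx:encode([1.0000000000000001]).
+<<"[1.0]">>
+2> jsx:encode([{<<"when">>, {{2015,10,6},{12,56,11}}}]).
+<<"{\"when\":\"2015-10-06T12:56:11Z\"}">>
+```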
+
+### incomplete input ###
+
+**jsx** can handle incomplete json texts. if the option `stream` is passed to the decoder
+or parser and if a partial json text is parsed, rather than returning a term from
+your callback handler, **jsx** returns `{incomplete, F}` where `F` is a function with
+an identical API to the anonymous fun returned from `decoder/3`, `encoder/3` or
+`parser/3`. it retains the internal state of the parser at the point where input
+was exhausted. this allows you to parse as you stream json over a socket or file
+descriptor, or to parse large json texts without needing to keep them entirely in
+memory
+
+however, it is important to recognize that **jsx** is conservative by default. **jsx** will
+not consider the parsing complete even when input is exhausted and the json text is
+unambiguously incomplete. to end parsing call the `incomplete` function with the
+argument `end_stream` (or `end_json`) like:
+
+```erlang
+1> {incomplete, F} = jsx:decode(<<"[">>, [stream]).
+{incomplete,#Fun<jsx_decoder.1.122947756>}
+2> F(end_stream). % can also be `F(end_json)`
+** exception error: bad argument
+3> {incomplete, G} = F(<<"]">>).
+{incomplete,#Fun<jsx_decoder.1.122947756>}
+4> G(end_stream). % can also be `G(end_json)`
+[]
+```
+
+
+## data types ##
+
+#### `json_term()` ####
+
+```erlang
+json_term() = [json_term()]
+ | [{binary() | atom() | integer(), json_term()}]
+ | #{} % map of any size, not just the empty map
+ | true
+ | false
+ | null
+ | integer()
+ | float()
+ | binary()
+ | atom()
+ | datetime()
+```
+
+the erlang representation of json. binaries should be `utf8` encoded, or close
+at least
+
+#### `json_text()` ####
+
+```erlang
+json_text() = binary()
+```
+
+a utf8 encoded binary containing a json string
+
+#### `event()` ####
+
+```erlang
+event() = start_object
+ | end_object
+ | start_array
+ | end_array
+ | {key, binary()}
+ | {string, binary()}
+ | {integer, integer()}
+ | {float, float()}
+ | {literal, true}
+ | {literal, false}
+ | {literal, null}
+ | end_json
+```
+
+the subset of [`token()`](#token) emitted by the decoder and encoder to handlers
+
+#### `option()` ####
+
+```erlang
+option() = dirty_strings
+ | escaped_forward_slashes
+ | escaped_strings
+ | repeat_keys
+ | stream
+ | strict
+ | {strict, [strict_option()]}
+ | return_tail
+ | uescape
+ | unescaped_jsonp
+
+strict_option() = comments
+ | trailing_commas
+ | utf8
+ | single_quotes
+ | escapes
+```
+
+**jsx** functions all take a common set of options. not all flags have meaning
+in all contexts, but they are always valid options. functions may have
+additional options beyond these. see
+[individual function documentation](#exports) for details
+
+- `dirty_strings`
+
+ json escaping is lossy; it mutates the json string and repeated application
+ can result in unwanted behaviour. if your strings are already escaped (or
+ you'd like to force invalid strings into "json" you monster) use this flag
+ to bypass escaping. this can also be used to read in **really** invalid json
+ strings. everything between unescaped quotes is passed as is to the resulting
+ string term. note that this takes precedence over any other options
+
+- `escaped_forward_slashes`
+
+ json strings are escaped according to the json spec. this means forward
+ slashes (solidus) are only escaped when this flag is present. otherwise they
+ are left unescaped. you may want to use this if you are embedding json
+ directly into a html or xml document
+
+- `escaped_strings`
+
+ by default both the encoder and decoder return strings as utf8 binaries
+ appropriate for use in erlang. escape sequences that were present in decoded
+ terms are converted into the appropriate codepoint while encoded terms are
+ unaltered. this flag escapes strings as if for output in json, removing
+ control codes and problematic codepoints and replacing them with the
+ appropriate escapes
+
+- `stream`
+
+ see [incomplete input](#incomplete-input)
+
+- `strict`
+
+ as mentioned [earlier](#description), **jsx** is pragmatic. if you're more of a
+ json purist or you're really into bdsm, stricter adherence to the spec is
+ possible. the following restrictions are available
+
+ * `comments`
+
+ comments are disabled and result in a `badarg` error
+
+ * `trailing_commas`
+
+ trailing commas in an object or list result in `badarg` errors
+
+ * `utf8`
+
+ invalid codepoints and malformed unicode result in `badarg` errors
+
+ * `single_quotes`
+
+ only keys and strings delimited by double quotes (`u+0022`) are allowed. the
+ single quote (`u+0027`) results in a `badarg` error
+
+ * `escapes`
+
+ escape sequences not adhering to the json spec result in a `badarg` error
+
+ any combination of these can be passed to **jsx** by using `{strict, [strict_option()]}`.
+ `strict` is equivalent to `{strict, [comments, trailing_commas, utf8, single_quotes, escapes]}`
+ (see the example after this list)
+
+- `return_tail`
+
+ upon reaching the end of a valid json term in an input stream return the term and any
+ remaining bytes in the input stream as `{with_tail, term(), binary()}` where the second
+ member of the tuple is the json term and the third is any remaining bytes. note that
+ leading whitespace will be stripped from the tail
+
+- `uescape`
+
+ escape all codepoints outside the ascii range for 7 bit clean output. note
+ this escaping takes place even if no other string escaping is requested (via
+ `escaped_strings`)
+
+- `unescaped_jsonp`
+
+ javascript interpreters treat the codepoints `u+2028` and `u+2029` as
+ significant whitespace. json strings that contain either of these codepoints
+ will be parsed incorrectly by some javascript interpreters. by default,
+ these codepoints are escaped (to `\u2028` and `\u2029`, respectively) to
+ retain compatibility. this option simply removes that escaping
+
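+a few of these flags in action. this is a sketch only; the results shown follow
+from the option descriptions above
+
+```erlang
+1> jsx:decode(<<"[1, 2, 3,]">>, [strict]).
+** exception error: bad argument
+2> jsx:decode(<<"[1,2,3] \"tail\"">>, [return_tail]).
+{with_tail,[1,2,3],<<"\"tail\"">>}
+```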
+
+## exports ##
+
+
+#### `encoder/3`, `decoder/3` & `parser/3` ####
+
+```erlang
+decoder(Module, Args, Opts) -> Fun((JSONText) -> any())
+encoder(Module, Args, Opts) -> Fun((JSONTerm) -> any())
+parser(Module, Args, Opts) -> Fun((Tokens) -> any())
+
+ Module = atom()
+ Args = any()
+ Opts = [option()]
+ JSONText = json_text()
+ JSONTerm = json_term()
+ Tokens = event() | [event()]
+```
+
+**jsx** is a json compiler with interleaved tokenizing, syntactic analysis and
+semantic analysis stages. included are two tokenizers; one that handles json
+texts (`decoder/3`) and one that handles erlang terms (`encoder/3`). there is
+also an entry point to the syntactic analysis stage for use with user-defined
+tokenizers (`parser/3`)
+
+all three functions return an anonymous function that takes the appropriate type
+of input and returns the result of performing semantic analysis, the tuple
+`{incomplete, F}` where `F` is a new anonymous function (see
+[incomplete input](#incomplete-input)) or a `badarg` error exception if
+syntactic analysis fails
+
+`Module` is the name of the callback module
+
+`Args` is any term that will be passed to `Module:init/1` prior to syntactic
+analysis to produce an initial state
+
+`Opts` are detailed [here](#option)
+
+check out [callback module documentation](#callback-exports) for details of
+the callback module interface
+
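+as a sketch, assuming a hypothetical callback module `my_handler` (structured like
+the modules described in [callback exports](#callback-exports)) that simply
+accumulates the events it receives, a decoder fun might be used like this
+
+```erlang
+1> (jsx:decoder(my_handler, [], []))(<<"[true]">>).
+[start_array,{literal,true},end_array,end_json]
+```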
+#### `decode/1,2` ####
+
+```erlang
+decode(JSON) -> Term
+decode(JSON, Opts) -> Term
+
+ JSON = json_text()
+ Term = json_term()
+ Opts = [option() | labels | {labels, Label} | return_maps]
+ Label = binary | atom | existing_atom | attempt_atom
+ F = fun((any()) -> any())
+```
+
+`decode` parses a json text (a `utf8` encoded binary) and produces an erlang
+term
+
+the option `labels` controls how keys are converted from json to
+erlang terms. `binary` (the default behavior) does no conversion
+beyond normal escaping. `atom` converts keys to erlang atoms and
+results in a `badarg` error if the keys fall outside the range of erlang
+atoms. `existing_atom` is identical to `atom` except it will not add
+new atoms to the atom table and will result in a `badarg` error if the atom
+does not exist. `attempt_atom` will convert keys to atoms when they exist,
+and leave them as binary otherwise
+
+the option `return_maps` will attempt to return objects as maps instead of
+proplists. this option has no effect when used with releases that do not
+support maps
+
+raises a `badarg` error exception if input is not valid json
+
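+for example (the proplist shapes shown follow from the descriptions above)
+
+```erlang
+1> jsx:decode(<<"{\"library\": \"jsx\"}">>).
+[{<<"library">>,<<"jsx">>}]
+2> jsx:decode(<<"{\"library\": \"jsx\"}">>, [{labels, atom}]).
+[{library,<<"jsx">>}]
+```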
+
+#### `encode/1,2` ####
+
+```erlang
+encode(Term) -> JSON
+encode(Term, Opts) -> JSON
+
+ Term = json_term()
+ JSON = json_text()
+ Opts = [option() | space | {space, N} | indent | {indent, N}]
+ N = pos_integer()
+```
+
+`encode` converts an erlang term into json text (a `utf8` encoded binary)
+
+the option `{space, N}` inserts `N` spaces after every comma and colon in your
+json output. `space` is an alias for `{space, 1}`. the default is `{space, 0}`
+
+the option `{indent, N}` inserts a newline and `N` spaces for each level of
+indentation in your json output. note that this overrides spaces inserted after
+a comma. `indent` is an alias for `{indent, 1}`. the default is `{indent, 0}`
+
+raises a `badarg` error exception if input is not a valid
+[erlang representation of json](#json---erlang-mapping)
+
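+for example, combining both options (output assumed; compare the `prettify` example
+in the quickstart)
+
+```erlang
+1> jsx:encode([{<<"a list">>, [1,2,3]}], [{space, 1}, {indent, 2}]).
+<<"{\n  \"a list\": [\n    1,\n    2,\n    3\n  ]\n}">>
+```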
+
+#### `format/1,2` ####
+
+```erlang
+format(JSON) -> JSON
+format(JSON, Opts) -> JSON
+
+ JSON = json_text()
+ Opts = [option() | space | {space, N} | indent | {indent, N}]
+ N = pos_integer()
+```
+
+`format` parses a json text (a `utf8` encoded binary) and produces a new json
+text according to the format rules specified by `Opts`
+
+the option `{space, N}` inserts `N` spaces after every comma and colon in your
+json output. `space` is an alias for `{space, 1}`. the default is `{space, 0}`
+
+the option `{indent, N}` inserts a newline and `N` spaces for each level of
+indentation in your json output. note that this overrides spaces inserted after
+a comma. `indent` is an alias for `{indent, 1}`. the default is `{indent, 0}`
+
+raises a `badarg` error exception if input is not valid json
+
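+for example (a sketch based on the rules above)
+
+```erlang
+1> jsx:format(<<"{\"a list\":[1,2,3]}">>, [{space, 1}]).
+<<"{\"a list\": [1, 2, 3]}">>
+```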
+
+#### `minify/1` ####
+
+```erlang
+minify(JSON) -> JSON
+
+ JSON = json_text()
+```
+
+`minify` parses a json text (a `utf8` encoded binary) and produces a new json
+text stripped of whitespace
+
+raises a `badarg` error exception if input is not valid json
+
+
+#### `prettify/1` ####
+
+```erlang
+prettify(JSON) -> JSON
+
+ JSON = json_text()
+```
+
+`prettify` parses a json text (a `utf8` encoded binary) and produces a new json
+text equivalent to `format(JSON, [{space, 1}, {indent, 2}])`
+
+raises a `badarg` error exception if input is not valid json
+
+
+#### `is_json/1,2` ####
+
+```erlang
+is_json(MaybeJSON) -> true | false
+is_json(MaybeJSON, Opts) -> true | false
+
+ MaybeJSON = any()
+ Opts = options()
+```
+
+returns true if input is a valid json text, false if not
+
+what exactly constitutes valid json may be [altered](#option)
+
+
+#### `is_term/1,2` ####
+
+```erlang
+is_term(MaybeJSON) -> true | false
+is_term(MaybeJSON, Opts) -> true | false
+
+ MaybeJSON = any()
+ Opts = options()
+```
+
+returns true if input is a valid erlang representation of json, false if not
+
+what exactly constitutes valid json may be altered via [options](#option)
+
+
+#### `maps_support/0` ####
+
+```erlang
+maps_support() -> true | false
+```
+
+if **jsx** was compiled with map support enabled returns `true`, else
+`false`
+
+
+## callback exports ##
+
+the following functions should be exported from a **jsx** callback module
+
+#### `Module:init/1` ####
+
+```erlang
+Module:init(Args) -> InitialState
+
+ Args = any()
+ InitialState = any()
+```
+
+whenever any of `encoder/3`, `decoder/3` or `parser/3` are called, this function
+is called with the `Args` argument provided in the calling function to obtain
+`InitialState`
+
+#### `Module:handle_event/2` ####
+
+```erlang
+Module:handle_event(Event, State) -> NewState
+
+ Event = [event()]
+ State = any()
+ NewState = any()
+```
+
+semantic analysis is performed by repeatedly calling `handle_event/2` with a
+stream of events emitted by the tokenizer and the current state. the new state
+returned is used as the input to the next call to `handle_event/2`. the
+following events must be handled (a minimal handler sketch follows this list):
+
+- `start_object`
+
+ the start of a json object
+
+- `end_object`
+
+ the end of a json object
+
+- `start_array`
+
+ the start of a json array
+
+- `end_array`
+
+ the end of a json array
+
+- `{string, binary()}`
+
+ a json string. it will usually be a `utf8` encoded binary. see the
+ [options](#option) for possible exceptions. note that keys are also
+ json strings
+
+- `{integer, integer()}`
+
+ an erlang integer (bignum)
+
+- `{float, float()}`
+
+ an erlang float
+
+- `{literal, true}`
+
+ the atom `true`
+
+- `{literal, false}`
+
+ the atom `false`
+
+- `{literal, null}`
+
+ the atom `null`
+
+- `end_json`
+
+ this event is emitted when syntactic analysis is completed. you should
+ do any cleanup and return the result of your semantic analysis
+
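+as a minimal sketch, a callback module (hypothetically named `event_collector`)
+that simply collects the events it is given and returns them once parsing
+completes might look like this
+
+```erlang
+-module(event_collector).
+-export([init/1, handle_event/2]).
+
+%% called once with the Args passed to decoder/3, encoder/3 or parser/3
+init([]) -> [].
+
+%% collect events; on end_json return them in the order they were emitted
+handle_event(end_json, State) -> lists:reverse(State);
+handle_event(Event, State) -> [Event | State].
+```
+
+with this module, `(jsx:decoder(event_collector, [], []))(<<"{}">>)` would return
+`[start_object, end_object]`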
+
+## acknowledgements ##
+
+jsx wouldn't be what it is without the contributions of [Paul J. Davis](https://github.com/davisp), [Lloyd Hilaiel](https://github.com/lloyd), [John Engelhart](https://github.com/johnezang), [Bob Ippolito](https://github.com/etrepum), [Brujo Benavides](https://github.com/elbrujohalcon), [Alex Kropivny](https://github.com/amtal), [Steve Strong](https://github.com/srstrong), [Michael Truog](https://github.com/okeuday), [Devin Torres](https://github.com/devinus), [fogfish](https://github.com/fogfish), [emptytea](https://github.com/emptytea), [John Daily](https://github.com/macintux), [Ola Bäckström](https://github.com/olabackstrom), [Joseph Crowe](https://github.com/JosephCrowe), [Patrick Gombert](https://github.com/patrickgombert), [Eshengazin S. Kuat](https://github.com/eskuat), [Max Lapshin](https://github.com/maxlapshin), [Bikram Chatterjee](https://github.com/c-bik), [Michael Uvarov](https://github.com/arcusfelis), [Led](https://github.com/Ledest) and [tvv](https://github.com/tvv)
+
+[json]: http://json.org
+[yajl]: http://lloyd.github.com/yajl
+[MIT]: http://www.opensource.org/licenses/mit-license.html
+[rebar]: https://github.com/rebar/rebar
+[meck]: https://github.com/eproxus/meck
+[rfc4627]: http://tools.ietf.org/html/rfc4627
+[travis]: https://travis-ci.org/
+[jsxn]: https://github.com/talentdeficit/jsxn
+[iso8601]: http://www.iso.org/iso/iso8601
diff --git a/deps/jsx/mix.exs b/deps/jsx/mix.exs
new file mode 100644
index 0000000..7d4eaca
--- /dev/null
+++ b/deps/jsx/mix.exs
@@ -0,0 +1,37 @@
+defmodule JSX.Mixfile do
+use Mix.Project
+
+ def project do
+ [
+ app: :jsx,
+ version: "2.7.0",
+ description: "an erlang application for consuming, producing and manipulating json. inspired by yajl",
+ deps: deps(Mix.env),
+ package: package,
+ language: :erlang,
+ erlc_options: opts(Mix.env)
+ ]
+ end
+
+ defp opts(:dev), do: [d: :TEST] ++ opts(:prod)
+ defp opts(_), do: [d: :maps_support, d: :maps_always]
+
+ defp deps(_), do: [{:mixunit, "~> 0.9.2", only: :dev}]
+
+ defp package do
+ [
+ files: [
+ "CHANGES.md",
+ "LICENSE",
+ "mix.exs",
+ "rebar.config",
+ "README.md",
+ "src"
+ ],
+ contributors: ["alisdair sullivan"],
+ links: %{"github" => "https://github.com/talentdeficit/jsx"},
+ licenses: ["MIT"]
+ ]
+ end
+end
+
diff --git a/deps/jsx/mix.lock b/deps/jsx/mix.lock
new file mode 100644
index 0000000..ccf4987
--- /dev/null
+++ b/deps/jsx/mix.lock
@@ -0,0 +1 @@
+%{"mixunit": {:hex, :mixunit, "0.9.2"}}
diff --git a/deps/jsx/rebar.config b/deps/jsx/rebar.config
new file mode 100644
index 0000000..0dcef82
--- /dev/null
+++ b/deps/jsx/rebar.config
@@ -0,0 +1,5 @@
+{erl_opts, [
+ % uncomment to always decode to maps
+ % {d, maps_always},
+]}.
+{edoc_opts, [{preprocess, true}]}.
\ No newline at end of file
diff --git a/deps/jsx/rebar.config.script b/deps/jsx/rebar.config.script
new file mode 100644
index 0000000..4510155
--- /dev/null
+++ b/deps/jsx/rebar.config.script
@@ -0,0 +1,11 @@
+Def0 = case erlang:is_builtin(erlang, binary_to_integer, 1) andalso
+ erlang:is_builtin(erlang, binary_to_float, 1) of
+ true -> [];
+ false -> [{d, no_binary_to_whatever}]
+ end,
+Defs = case erlang:is_builtin(erlang, is_map, 1) of
+ true -> [{d, maps_support}|Def0];
+ false -> Def0
+ end,
+lists:keystore(erl_opts, 1, CONFIG,
+ {erl_opts, proplists:get_value(erl_opts, CONFIG, []) ++ Defs}).
diff --git a/deps/jsx/rebar.lock b/deps/jsx/rebar.lock
new file mode 100644
index 0000000..57afcca
--- /dev/null
+++ b/deps/jsx/rebar.lock
@@ -0,0 +1 @@
+[].
diff --git a/deps/jsx/src/jsx.app.src b/deps/jsx/src/jsx.app.src
new file mode 100644
index 0000000..553a3d1
--- /dev/null
+++ b/deps/jsx/src/jsx.app.src
@@ -0,0 +1,21 @@
+{application, jsx,
+[
+ {description, "a streaming, evented json parsing toolkit"},
+ {vsn, "2.7.0"},
+ {modules, [
+ jsx,
+ jsx_encoder,
+ jsx_decoder,
+ jsx_parser,
+ jsx_to_json,
+ jsx_to_term,
+ jsx_config,
+ jsx_verify
+ ]},
+ {registered, []},
+ {applications, [
+ kernel,
+ stdlib
+ ]},
+ {env, []}
+]}.
diff --git a/deps/jsx/src/jsx.erl b/deps/jsx/src/jsx.erl
new file mode 100644
index 0000000..0b55b2e
--- /dev/null
+++ b/deps/jsx/src/jsx.erl
@@ -0,0 +1,527 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx).
+
+-export([encode/1, encode/2, decode/1, decode/2]).
+-export([is_json/1, is_json/2, is_term/1, is_term/2]).
+-export([format/1, format/2, minify/1, prettify/1]).
+-export([consult/1, consult/2]).
+-export([encoder/3, decoder/3, parser/3]).
+-export([resume/3]).
+-export([maps_support/0]).
+
+-export_type([json_term/0, json_text/0, token/0]).
+-export_type([encoder/0, decoder/0, parser/0, internal_state/0]).
+-export_type([config/0]).
+
+
+-ifdef(TEST).
+%% data and helper functions for tests
+-export([test_cases/0, special_test_cases/0]).
+-export([init/1, handle_event/2]).
+-endif.
+
+
+-ifndef(maps_support).
+-type json_term() :: [{binary() | atom(), json_term()}] | [{}]
+ | [json_term()] | []
+ | true | false | null
+ | integer() | float()
+ | binary() | atom()
+ | calendar:datetime().
+-endif.
+
+-ifdef(maps_support).
+-type json_term() :: [{binary() | atom(), json_term()}] | [{}]
+ | [json_term()] | []
+ | map()
+ | true | false | null
+ | integer() | float()
+ | binary() | atom()
+ | calendar:datetime().
+-endif.
+
+-type json_text() :: binary().
+
+-type config() :: jsx_config:config().
+
+-spec encode(Source::json_term()) -> json_text().
+
+encode(Source) -> encode(Source, []).
+
+-spec encode(Source::json_term(), Config::jsx_to_json:config()) -> json_text() | {incomplete, encoder()}.
+
+encode(Source, Config) -> jsx_to_json:to_json(Source, Config).
+
+
+-spec decode(Source::json_text()) -> json_term().
+
+decode(Source) -> decode(Source, []).
+
+-spec decode(Source::json_text(), Config::jsx_to_term:config()) -> json_term() | {incomplete, decoder()}.
+
+decode(Source, Config) -> jsx_to_term:to_term(Source, Config).
+
+
+-spec format(Source::json_text()) -> json_text().
+
+format(Source) -> format(Source, []).
+
+-spec format(Source::json_text(), Config::jsx_to_json:config()) -> json_text() | {incomplete, decoder()}.
+
+format(Source, Config) -> jsx_to_json:format(Source, Config).
+
+
+-spec minify(Source::json_text()) -> json_text().
+
+minify(Source) -> format(Source, []).
+
+
+-spec prettify(Source::json_text()) -> json_text().
+
+prettify(Source) -> format(Source, [space, {indent, 2}]).
+
+
+-spec is_json(Source::any()) -> boolean().
+
+is_json(Source) -> is_json(Source, []).
+
+-spec is_json(Source::any(), Config::jsx_verify:config()) -> boolean() | {incomplete, decoder()}.
+
+is_json(Source, Config) -> jsx_verify:is_json(Source, Config).
+
+
+-spec is_term(Source::any()) -> boolean().
+
+is_term(Source) -> is_term(Source, []).
+
+-spec is_term(Source::any(), Config::jsx_verify:config()) -> boolean() | {incomplete, encoder()}.
+
+is_term(Source, Config) -> jsx_verify:is_term(Source, Config).
+
+
+-spec consult(File::file:name_all()) -> list(json_term()).
+
+consult(File) -> consult(File, []).
+
+-spec consult(File::file:name_all(), Config::jsx_to_term:config()) -> list(json_term()).
+
+consult(File, Config) -> jsx_consult:consult(File, Config).
+
+
+-type decoder() :: fun((json_text() | end_stream | end_json) -> any()).
+
+-spec decoder(Handler::module(), State::any(), Config::list()) -> decoder().
+
+decoder(Handler, State, Config) -> jsx_decoder:decoder(Handler, State, Config).
+
+
+-type encoder() :: fun((json_term() | end_stream | end_json) -> any()).
+
+-spec encoder(Handler::module(), State::any(), Config::list()) -> encoder().
+
+encoder(Handler, State, Config) -> jsx_encoder:encoder(Handler, State, Config).
+
+
+-type token() :: [token()]
+ | start_object
+ | end_object
+ | start_array
+ | end_array
+ | {key, binary()}
+ | {string, binary()}
+ | binary()
+ | {number, integer() | float()}
+ | {integer, integer()}
+ | {float, float()}
+ | integer()
+ | float()
+ | {literal, true}
+ | {literal, false}
+ | {literal, null}
+ | true
+ | false
+ | null
+ | end_json.
+
+
+-type parser() :: fun((token() | end_stream) -> any()).
+
+-spec parser(Handler::module(), State::any(), Config::list()) -> parser().
+
+parser(Handler, State, Config) -> jsx_parser:parser(Handler, State, Config).
+
+-opaque internal_state() :: tuple().
+
+-spec resume(Term::json_text() | token(), InternalState::internal_state(), Config::list()) -> any().
+
+resume(Term, {decoder, State, Handler, Acc, Stack}, Config) ->
+ jsx_decoder:resume(Term, State, Handler, Acc, Stack, jsx_config:parse_config(Config));
+resume(Term, {parser, State, Handler, Stack}, Config) ->
+ jsx_parser:resume(Term, State, Handler, Stack, jsx_config:parse_config(Config)).
+
+
+-spec maps_support() -> boolean().
+
+-ifndef(maps_support).
+maps_support() -> false.
+-endif.
+-ifdef(maps_support).
+maps_support() -> true.
+-endif.
+
+
+-ifdef(TEST).
+
+-include_lib("eunit/include/eunit.hrl").
+
+
+%% test handler
+init([]) -> [].
+
+handle_event(end_json, State) -> lists:reverse([end_json] ++ State);
+handle_event(Event, State) -> [Event] ++ State.
+
+
+test_cases() ->
+ empty_array()
+ ++ nested_array()
+ ++ empty_object()
+ ++ nested_object()
+ ++ strings()
+ ++ literals()
+ ++ integers()
+ ++ floats()
+ ++ compound_object().
+
+%% segregate these so we can skip them in `jsx_to_term`
+special_test_cases() -> special_objects() ++ special_array().
+
+
+empty_array() -> [{"[]", <<"[]">>, [], [start_array, end_array]}].
+
+
+nested_array() ->
+ [{
+ "[[[]]]",
+ <<"[[[]]]">>,
+ [[[]]],
+ [start_array, start_array, start_array, end_array, end_array, end_array]
+ }].
+
+
+empty_object() -> [{"{}", <<"{}">>, [{}], [start_object, end_object]}].
+
+
+nested_object() ->
+ [{
+ "{\"key\":{\"key\":{}}}",
+ <<"{\"key\":{\"key\":{}}}">>,
+ [{<<"key">>, [{<<"key">>, [{}]}]}],
+ [
+ start_object,
+ {key, <<"key">>},
+ start_object,
+ {key, <<"key">>},
+ start_object,
+ end_object,
+ end_object,
+ end_object
+ ]
+ }].
+
+
+naked_strings() ->
+ Raw = [
+ "",
+ "hello world"
+ ],
+ [
+ {
+ String,
+ <<"\"", (list_to_binary(String))/binary, "\"">>,
+ list_to_binary(String),
+ [{string, list_to_binary(String)}]
+ }
+ || String <- Raw
+ ].
+
+
+strings() ->
+ naked_strings()
+ ++ [ wrap_with_array(Test) || Test <- naked_strings() ]
+ ++ [ wrap_with_object(Test) || Test <- naked_strings() ].
+
+
+naked_integers() ->
+ Raw = [
+ 1, 2, 3,
+ 127, 128, 129,
+ 255, 256, 257,
+ 65534, 65535, 65536,
+ 18446744073709551616,
+ 18446744073709551617
+ ],
+ [
+ {
+ integer_to_list(X),
+ list_to_binary(integer_to_list(X)),
+ X,
+ [{integer, X}]
+ }
+ || X <- Raw ++ [ -1 * Y || Y <- Raw ] ++ [0]
+ ].
+
+
+integers() ->
+ naked_integers()
+ ++ [ wrap_with_array(Test) || Test <- naked_integers() ]
+ ++ [ wrap_with_object(Test) || Test <- naked_integers() ].
+
+
+naked_floats() ->
+ Raw = [
+ 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
+ 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9,
+ 1234567890.0987654321,
+ 0.0e0,
+ 1234567890.0987654321e16,
+ 0.1e0, 0.1e1, 0.1e2, 0.1e4, 0.1e8, 0.1e16, 0.1e308,
+ 1.0e0, 1.0e1, 1.0e2, 1.0e4, 1.0e8, 1.0e16, 1.0e308,
+ 2.2250738585072014e-308, %% min normalized float
+ 1.7976931348623157e308, %% max normalized float
+ 5.0e-324, %% min denormalized float
+ 2.225073858507201e-308 %% max denormalized float
+ ],
+ [
+ {
+ sane_float_to_list(X),
+ list_to_binary(sane_float_to_list(X)),
+ X,
+ [{float, X}]
+ }
+ || X <- Raw ++ [ -1 * Y || Y <- Raw ]
+ ].
+
+
+floats() ->
+ naked_floats()
+ ++ [ wrap_with_array(Test) || Test <- naked_floats() ]
+ ++ [ wrap_with_object(Test) || Test <- naked_floats() ].
+
+
+naked_literals() ->
+ [
+ {
+ atom_to_list(Literal),
+ atom_to_binary(Literal, unicode),
+ Literal,
+ [{literal, Literal}]
+ }
+ || Literal <- [true, false, null]
+ ].
+
+
+literals() ->
+ naked_literals()
+ ++ [ wrap_with_array(Test) || Test <- naked_literals() ]
+ ++ [ wrap_with_object(Test) || Test <- naked_literals() ].
+
+
+compound_object() ->
+ [{
+ "[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]",
+ <<"[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]">>,
+ [[{<<"alpha">>, [1, 2, 3]}, {<<"beta">>, [{<<"alpha">>, [1.0, 2.0, 3.0]}, {<<"beta">>, [true, false]}]}], [[{}]]],
+ [
+ start_array,
+ start_object,
+ {key, <<"alpha">>},
+ start_array,
+ {integer, 1},
+ {integer, 2},
+ {integer, 3},
+ end_array,
+ {key, <<"beta">>},
+ start_object,
+ {key, <<"alpha">>},
+ start_array,
+ {float, 1.0},
+ {float, 2.0},
+ {float, 3.0},
+ end_array,
+ {key, <<"beta">>},
+ start_array,
+ {literal, true},
+ {literal, false},
+ end_array,
+ end_object,
+ end_object,
+ start_array,
+ start_object,
+ end_object,
+ end_array,
+ end_array
+ ]
+ }].
+
+
+special_objects() ->
+ [
+ {
+ "[{key, atom}]",
+ <<"{\"key\":\"atom\"}">>,
+ [{key, atom}],
+ [start_object, {key, <<"key">>}, {string, <<"atom">>}, end_object]
+ },
+ {
+ "[{1, true}]",
+ <<"{\"1\":true}">>,
+ [{1, true}],
+ [start_object, {key, <<"1">>}, {literal, true}, end_object]
+ }
+ ].
+
+
+special_array() ->
+ [
+ {
+ "[foo, bar]",
+ <<"[\"foo\",\"bar\"]">>,
+ [foo, bar],
+ [start_array, {string, <<"foo">>}, {string, <<"bar">>}, end_array]
+ }
+ ].
+
+
+wrap_with_array({Title, JSON, Term, Events}) ->
+ {
+ "[" ++ Title ++ "]",
+ <<"[", JSON/binary, "]">>,
+ [Term],
+ [start_array] ++ Events ++ [end_array]
+ }.
+
+
+wrap_with_object({Title, JSON, Term, Events}) ->
+ {
+ "{\"key\":" ++ Title ++ "}",
+ <<"{\"key\":", JSON/binary, "}">>,
+ [{<<"key">>, Term}],
+ [start_object, {key, <<"key">>}] ++ Events ++ [end_object]
+ }.
+
+
+sane_float_to_list(X) ->
+ [Output] = io_lib:format("~p", [X]),
+ Output.
+
+
+incremental_decode(JSON) ->
+ Final = lists:foldl(
+ fun(Byte, Decoder) -> {incomplete, F} = Decoder(Byte), F end,
+ decoder(jsx, [], [stream]),
+ json_to_bytes(JSON)
+ ),
+ Final(end_stream).
+
+
+incremental_parse(Events) ->
+ Final = lists:foldl(
+ fun(Event, Parser) -> {incomplete, F} = Parser(Event), F end,
+ parser(?MODULE, [], [stream]),
+ lists:map(fun(X) -> [X] end, Events)
+ ),
+ Final(end_stream).
+
+
+%% used to convert a json text into a list of codepoints to be incrementally
+%% parsed
+json_to_bytes(JSON) -> json_to_bytes(JSON, []).
+
+json_to_bytes(<<>>, Acc) -> [<<>>] ++ lists:reverse(Acc);
+json_to_bytes(<<X, Rest/binary>>, Acc) -> json_to_bytes(Rest, [<<X>>] ++ Acc).
+
+
+%% actual tests!
+decode_test_() ->
+ Data = test_cases(),
+ [{Title, ?_assertEqual(Events ++ [end_json], (decoder(?MODULE, [], []))(JSON))}
+ || {Title, JSON, _, Events} <- Data
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(Events ++ [end_json], incremental_decode(JSON))}
+ || {Title, JSON, _, Events} <- Data
+ ].
+
+
+parse_test_() ->
+ Data = test_cases(),
+ [{Title, ?_assertEqual(Events ++ [end_json], (parser(?MODULE, [], []))(Events ++ [end_json]))}
+ || {Title, _, _, Events} <- Data
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(Events ++ [end_json], incremental_parse(Events))}
+ || {Title, _, _, Events} <- Data
+ ].
+
+
+encode_test_() ->
+ Data = test_cases(),
+ [
+ {
+ Title, ?_assertEqual(
+ Events ++ [end_json],
+ (jsx:encoder(jsx, [], []))(Term)
+ )
+ } || {Title, _, Term, Events} <- Data
+ ].
+
+end_stream_test_() ->
+ Tokens = [start_object, end_object, end_json],
+ [
+ {"encoder end_stream", ?_assertEqual(
+ Tokens,
+ begin
+ {incomplete, F} = (jsx:parser(jsx, [], [stream]))([start_object, end_object]),
+ F(end_stream)
+ end
+ )},
+ {"encoder end_json", ?_assertEqual(
+ Tokens,
+ begin
+ {incomplete, F} = (jsx:parser(jsx, [], [stream]))([start_object, end_object]),
+ F(end_json)
+ end
+ )},
+ {"decoder end_stream", ?_assertEqual(
+ Tokens,
+ begin {incomplete, F} = (jsx:decoder(jsx, [], [stream]))(<<"{}">>), F(end_stream) end
+ )},
+ {"decoder end_json", ?_assertEqual(
+ Tokens,
+ begin {incomplete, F} = (jsx:decoder(jsx, [], [stream]))(<<"{}">>), F(end_json) end
+ )}
+ ].
+
+
+-endif.
diff --git a/deps/jsx/src/jsx_config.erl b/deps/jsx/src/jsx_config.erl
new file mode 100644
index 0000000..92a958b
--- /dev/null
+++ b/deps/jsx/src/jsx_config.erl
@@ -0,0 +1,337 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_config).
+
+-export([parse_config/1]).
+-export([config_to_list/1]).
+-export([extract_config/1, valid_flags/0]).
+
+-ifdef(TEST).
+-export([fake_error_handler/3]).
+-endif.
+
+-include("jsx_config.hrl").
+
+-type handler_type(Handler) ::
+ fun((jsx:json_text() | end_stream |
+ jsx:json_term(),
+ {decoder, any(), module(), null | list(), list()} |
+ {parser, any(), module(), list()} |
+ {encoder, any(), module()},
+ list({pre_encode, fun((any()) -> any())} |
+ {error_handler, Handler} |
+ {incomplete_handler, Handler} |
+ atom())) -> any()).
+-type handler() :: handler_type(handler()).
+-export_type([handler/0]).
+
+-type config() :: #config{}.
+-export_type([config/0]).
+
+%% parsing of jsx config
+-spec parse_config(Config::proplists:proplist()) -> config().
+
+parse_config(Config) -> parse_config(Config, #config{}).
+
+parse_config([], Config) -> Config;
+parse_config([escaped_forward_slashes|Rest], Config) ->
+ parse_config(Rest, Config#config{escaped_forward_slashes=true});
+parse_config([escaped_strings|Rest], Config) ->
+ parse_config(Rest, Config#config{escaped_strings=true});
+parse_config([unescaped_jsonp|Rest], Config) ->
+ parse_config(Rest, Config#config{unescaped_jsonp=true});
+parse_config([dirty_strings|Rest], Config) ->
+ parse_config(Rest, Config#config{dirty_strings=true});
+parse_config([multi_term|Rest], Config) ->
+ parse_config(Rest, Config#config{multi_term=true});
+parse_config([return_tail|Rest], Config) ->
+ parse_config(Rest, Config#config{return_tail=true});
+%% retained for backwards compatibility; now does nothing
+parse_config([repeat_keys|Rest], Config) ->
+ parse_config(Rest, Config);
+parse_config([uescape|Rest], Config) ->
+ parse_config(Rest, Config#config{uescape=true});
+parse_config([strict|Rest], Config) ->
+ parse_config(Rest, Config#config{
+ strict_comments=true,
+ strict_commas=true,
+ strict_utf8=true,
+ strict_single_quotes=true,
+ strict_escapes=true
+ });
+parse_config([{strict, Strict}|Rest], Config) ->
+ parse_strict(Strict, Rest, Config);
+parse_config([stream|Rest], Config) ->
+ parse_config(Rest, Config#config{stream=true});
+parse_config([{error_handler, ErrorHandler}|Rest] = Options, Config) when is_function(ErrorHandler, 3) ->
+ case Config#config.error_handler of
+ false -> parse_config(Rest, Config#config{error_handler=ErrorHandler})
+ ; _ -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([{incomplete_handler, IncompleteHandler}|Rest] = Options, Config) when is_function(IncompleteHandler, 3) ->
+ case Config#config.incomplete_handler of
+ false -> parse_config(Rest, Config#config{incomplete_handler=IncompleteHandler})
+ ; _ -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config(_Options, _Config) -> erlang:error(badarg).
+
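+%% for illustration, parse_config/1 folds a list of flags into the #config{}
+%% record, e.g. parse_config([strict, stream]) should return a config with
+%% stream and every strict_* field set to true, while an unknown flag such as
+%% this_flag_does_not_exist should raise badarg (see the tests below)
+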
+
+parse_strict([], Rest, Config) -> parse_config(Rest, Config);
+parse_strict([comments|Strict], Rest, Config) ->
+ parse_strict(Strict, Rest, Config#config{strict_comments=true});
+parse_strict([trailing_commas|Strict], Rest, Config) ->
+ parse_strict(Strict, Rest, Config#config{strict_commas=true});
+parse_strict([utf8|Strict], Rest, Config) ->
+ parse_strict(Strict, Rest, Config#config{strict_utf8=true});
+parse_strict([single_quotes|Strict], Rest, Config) ->
+ parse_strict(Strict, Rest, Config#config{strict_single_quotes=true});
+parse_strict([escapes|Strict], Rest, Config) ->
+ parse_strict(Strict, Rest, Config#config{strict_escapes=true});
+parse_strict(_Strict, _Rest, _Config) ->
+ erlang:error(badarg).
+
+
+
+-spec config_to_list(Config::config()) -> proplists:proplist().
+
+config_to_list(Config) ->
+ reduce_config(lists:map(
+ fun ({error_handler, F}) -> {error_handler, F};
+ ({incomplete_handler, F}) -> {incomplete_handler, F};
+ ({Key, true}) -> Key
+ end,
+ lists:filter(
+ fun({_, false}) -> false; (_) -> true end,
+ lists:zip(record_info(fields, config), tl(tuple_to_list(Config)))
+ )
+ )).
+
+
+reduce_config(Input) -> reduce_config(Input, [], []).
+
+reduce_config([], Output, Strict) ->
+ case length(Strict) of
+ 0 -> lists:reverse(Output);
+ 4 -> lists:reverse(Output) ++ [strict];
+ _ -> lists:reverse(Output) ++ [{strict, lists:reverse(Strict)}]
+ end;
+reduce_config([strict_comments|Input], Output, Strict) ->
+ reduce_config(Input, Output, [comments] ++ Strict);
+reduce_config([strict_utf8|Input], Output, Strict) ->
+ reduce_config(Input, Output, [utf8] ++ Strict);
+reduce_config([strict_single_quotes|Input], Output, Strict) ->
+ reduce_config(Input, Output, [single_quotes] ++ Strict);
+reduce_config([strict_escapes|Input], Output, Strict) ->
+ reduce_config(Input, Output, [escapes] ++ Strict);
+reduce_config([Else|Input], Output, Strict) ->
+ reduce_config(Input, [Else] ++ Output, Strict).
+
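+%% for illustration, config_to_list/1 is roughly the inverse of parse_config/1:
+%% flags set to true come back as atoms and the strict_* fields are collapsed,
+%% so config_to_list(#config{stream=true, strict_comments=true}) should return
+%% [stream, {strict, [comments]}], and a config with all four reduced strict_*
+%% fields set should yield [strict]
+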
+
+-spec valid_flags() -> [atom()].
+
+valid_flags() ->
+ [
+ escaped_forward_slashes,
+ escaped_strings,
+ unescaped_jsonp,
+ dirty_strings,
+ multi_term,
+ return_tail,
+ repeat_keys,
+ strict,
+ stream,
+ uescape,
+ error_handler,
+ incomplete_handler
+ ].
+
+
+-spec extract_config(Config::proplists:proplist()) -> proplists:proplist().
+
+extract_config(Config) ->
+ extract_parser_config(Config, []).
+
+extract_parser_config([], Acc) -> Acc;
+extract_parser_config([{K,V}|Rest], Acc) ->
+ case lists:member(K, valid_flags()) of
+ true -> extract_parser_config(Rest, [{K,V}] ++ Acc)
+ ; false -> extract_parser_config(Rest, Acc)
+ end;
+extract_parser_config([K|Rest], Acc) ->
+ case lists:member(K, valid_flags()) of
+ true -> extract_parser_config(Rest, [K] ++ Acc)
+ ; false -> extract_parser_config(Rest, Acc)
+ end.
+
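+%% for illustration, extract_config/1 simply filters a proplist down to the
+%% options jsx understands, so something like
+%% extract_config([stream, {error_handler, F}, {unrelated, opt}]) should keep
+%% only stream and the error_handler tuple (order is not preserved)
+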
+
+%% eunit tests
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+config_test_() ->
+ [
+ {"all flags",
+ ?_assertEqual(
+ #config{escaped_forward_slashes = true,
+ escaped_strings = true,
+ unescaped_jsonp = true,
+ dirty_strings = true,
+ multi_term = true,
+ return_tail = true,
+ strict_comments = true,
+ strict_commas = true,
+ strict_utf8 = true,
+ strict_single_quotes = true,
+ strict_escapes = true,
+ stream = true,
+ uescape = true
+ },
+ parse_config([dirty_strings,
+ escaped_forward_slashes,
+ escaped_strings,
+ unescaped_jsonp,
+ multi_term,
+ return_tail,
+ repeat_keys,
+ strict,
+ stream,
+ uescape
+ ])
+ )
+ },
+ {"strict flag",
+ ?_assertEqual(
+ #config{strict_comments = true,
+ strict_commas = true,
+ strict_utf8 = true,
+ strict_single_quotes = true,
+ strict_escapes = true
+ },
+ parse_config([strict])
+ )
+ },
+ {"strict selective",
+ ?_assertEqual(
+ #config{strict_comments = true},
+ parse_config([{strict, [comments]}])
+ )
+ },
+ {"strict expanded",
+ ?_assertEqual(
+ #config{strict_comments = true,
+ strict_utf8 = true,
+ strict_single_quotes = true,
+ strict_escapes = true
+ },
+ parse_config([{strict, [comments, utf8, single_quotes, escapes]}])
+ )
+ },
+ {"error_handler flag", ?_assertEqual(
+ #config{error_handler=fun ?MODULE:fake_error_handler/3},
+ parse_config([{error_handler, fun ?MODULE:fake_error_handler/3}])
+ )},
+ {"two error_handlers defined", ?_assertError(
+ badarg,
+ parse_config([
+ {error_handler, fun(_, _, _) -> true end},
+ {error_handler, fun(_, _, _) -> false end}
+ ])
+ )},
+ {"incomplete_handler flag", ?_assertEqual(
+ #config{incomplete_handler=fun ?MODULE:fake_error_handler/3},
+ parse_config([{incomplete_handler, fun ?MODULE:fake_error_handler/3}])
+ )},
+ {"two incomplete_handlers defined", ?_assertError(
+ badarg,
+ parse_config([
+ {incomplete_handler, fun(_, _, _) -> true end},
+ {incomplete_handler, fun(_, _, _) -> false end}
+ ])
+ )},
+ {"bad option flag", ?_assertError(badarg, parse_config([this_flag_does_not_exist]))}
+ ].
+
+
+config_to_list_test_() ->
+ [
+ {"empty config", ?_assertEqual(
+ [],
+ config_to_list(#config{})
+ )},
+ {"all flags", ?_assertEqual(
+ [dirty_strings,
+ escaped_forward_slashes,
+ escaped_strings,
+ multi_term,
+ stream,
+ uescape,
+ unescaped_jsonp,
+ strict
+ ],
+ config_to_list(
+ #config{escaped_forward_slashes = true,
+ escaped_strings = true,
+ unescaped_jsonp = true,
+ dirty_strings = true,
+ multi_term = true,
+ strict_comments = true,
+ strict_utf8 = true,
+ strict_single_quotes = true,
+ strict_escapes = true,
+ stream = true,
+ uescape = true
+ }
+ )
+ )},
+ {"single strict", ?_assertEqual(
+ [{strict, [comments]}],
+ config_to_list(#config{strict_comments = true})
+ )},
+ {"multiple strict", ?_assertEqual(
+ [{strict, [utf8, single_quotes, escapes]}],
+ config_to_list(#config{strict_utf8 = true, strict_single_quotes = true, strict_escapes = true})
+ )},
+ {"all strict", ?_assertEqual(
+ [strict],
+ config_to_list(#config{strict_comments = true,
+ strict_utf8 = true,
+ strict_single_quotes = true,
+ strict_escapes = true})
+ )},
+ {"error handler", ?_assertEqual(
+ [{error_handler, fun ?MODULE:fake_error_handler/3}],
+ config_to_list(#config{error_handler=fun ?MODULE:fake_error_handler/3})
+ )},
+ {"incomplete handler", ?_assertEqual(
+ [{incomplete_handler, fun ?MODULE:fake_error_handler/3}],
+ config_to_list(#config{incomplete_handler=fun ?MODULE:fake_error_handler/3})
+ )}
+ ].
+
+
+fake_error_handler(_, _, _) -> ok.
+
+
+-endif.
diff --git a/deps/jsx/src/jsx_config.hrl b/deps/jsx/src/jsx_config.hrl
new file mode 100644
index 0000000..be619c0
--- /dev/null
+++ b/deps/jsx/src/jsx_config.hrl
@@ -0,0 +1,17 @@
+-record(config, {
+ dirty_strings = false :: boolean(),
+ escaped_forward_slashes = false :: boolean(),
+ escaped_strings = false :: boolean(),
+ multi_term = false :: boolean(),
+ strict_comments = false :: boolean(),
+ strict_commas = false :: boolean(),
+ strict_utf8 = false :: boolean(),
+ strict_single_quotes = false :: boolean(),
+ strict_escapes = false :: boolean(),
+ stream = false :: boolean(),
+ return_tail = false :: boolean(),
+ uescape = false :: boolean(),
+ unescaped_jsonp = false :: boolean(),
+ error_handler = false :: false | jsx_config:handler(),
+ incomplete_handler = false :: false | jsx_config:handler()
+}).
diff --git a/deps/jsx/src/jsx_consult.erl b/deps/jsx/src/jsx_consult.erl
new file mode 100644
index 0000000..2b0f1da
--- /dev/null
+++ b/deps/jsx/src/jsx_consult.erl
@@ -0,0 +1,99 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2015 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_consult).
+
+-export([consult/2]).
+-export([init/1, reset/1, handle_event/2]).
+
+
+-record(config, {
+ labels = binary,
+ return_maps = false
+}).
+
+-type config() :: list().
+-export_type([config/0]).
+
+-ifndef(maps_support).
+-type json_value() :: list(json_value())
+ | list({binary() | atom(), json_value()})
+ | true
+ | false
+ | null
+ | integer()
+ | float()
+ | binary().
+-endif.
+
+-ifdef(maps_support).
+-type json_value() :: list(json_value())
+ | map()
+ | true
+ | false
+ | null
+ | integer()
+ | float()
+ | binary().
+-endif.
+
+
+-ifdef(maps_always).
+opts(Opts) -> [return_maps, multi_term] ++ Opts.
+-endif.
+-ifndef(maps_always).
+opts(Opts) -> [multi_term] ++ Opts.
+-endif.
+
+-spec consult(File::file:name_all(), Config::config()) -> [json_value()].
+
+consult(File, Config) when is_list(Config) ->
+ case file:read_file(File) of
+ {ok, Bin} ->
+ {Final, _, _} = (jsx:decoder(
+ ?MODULE,
+ opts(Config),
+ jsx_config:extract_config(opts(Config))
+ ))(Bin),
+ lists:reverse(Final);
+ {error, _} -> erlang:error(badarg)
+ end.
+
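+%% for illustration (the filename below is hypothetical): a file "terms.json"
+%% containing two whitespace separated documents, {"a": 1} and {"b": 2}, read
+%% with consult("terms.json", []) should come back as a two element list, one
+%% decoded term per document, in file order; whether objects decode to
+%% proplists or maps depends on the return_maps/maps_always settings above
+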
+
+-type state() :: {list(), config(), any()}.
+-spec init(Config::proplists:proplist()) -> state().
+
+init(Config) -> {[], Config, jsx_to_term:start_term(Config)}.
+
+
+-spec reset(State::state()) -> state().
+
+reset({Acc, Config, _}) -> {Acc, Config, jsx_to_term:start_term(Config)}.
+
+
+-spec handle_event(Event::any(), State::state()) -> state().
+
+handle_event(end_json, {Acc, Config, State}) ->
+ {[jsx_to_term:get_value(State)] ++ Acc, Config, State};
+handle_event(Event, {Acc, Config, State}) ->
+ {Acc, Config, jsx_to_term:handle_event(Event, State)}.
diff --git a/deps/jsx/src/jsx_decoder.erl b/deps/jsx/src/jsx_decoder.erl
new file mode 100644
index 0000000..4600956
--- /dev/null
+++ b/deps/jsx/src/jsx_decoder.erl
@@ -0,0 +1,1976 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_decoder).
+
+%% inline handle_event, format_number, maybe_replace and the quote helpers
+-compile({inline, [handle_event/3]}).
+-compile({inline, [format_number/1]}).
+-compile({inline, [maybe_replace/2]}).
+-compile({inline, [doublequote/5, singlequote/5]}).
+
+-export([decoder/3, resume/6]).
+
+
+-spec decoder(Handler::module(), State::any(), Config::list()) -> jsx:decoder().
+
+decoder(Handler, State, Config) ->
+ fun(JSON) -> start(JSON, {Handler, Handler:init(State)}, [], jsx_config:parse_config(Config)) end.
+
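+%% for illustration: the returned fun consumes a complete binary in one call
+%% when the stream flag is absent, e.g. with a term building handler such as
+%% jsx_to_term (the handler behind jsx:decode/1) something like
+%%   (decoder(jsx_to_term, [], []))(<<"[1, true, null]">>)
+%% should return [1, true, null]
+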
+
+%% resume allows continuation from interrupted decoding without having to explicitly export
+%% all states
+-spec resume(
+ Rest::binary(),
+ State::atom(),
+ Handler::{atom(), any()},
+ Acc::any(),
+ Stack::list(atom()),
+ Config::jsx:config()
+ ) -> jsx:decoder() | {incomplete, jsx:decoder()}.
+
+resume(Rest, State, Handler, Acc, Stack, Config) ->
+ case State of
+ start -> start(Rest, Handler, Stack, Config);
+ value -> value(Rest, Handler, Stack, Config);
+ object -> object(Rest, Handler, Stack, Config);
+ array -> array(Rest, Handler, Stack, Config);
+ colon -> colon(Rest, Handler, Stack, Config);
+ key -> key(Rest, Handler, Stack, Config);
+ string -> string(Rest, Handler, Acc, Stack, Config);
+ number -> number(Rest, Handler, Acc, Stack, Config);
+ true -> true(Rest, Handler, Stack, Config);
+ false -> false(Rest, Handler, Stack, Config);
+ null -> null(Rest, Handler, Stack, Config);
+ comment -> comment(Rest, Handler, Acc, Stack, Config);
+ maybe_done -> maybe_done(Rest, Handler, Stack, Config);
+ done -> done(Rest, Handler, Stack, Config)
+ end.
+
+
+-include("jsx_config.hrl").
+
+
+%% whitespace
+-define(space, 16#20).
+-define(tab, 16#09).
+-define(cr, 16#0D).
+-define(newline, 16#0A).
+
+%% object delimiters
+-define(start_object, 16#7B).
+-define(end_object, 16#7D).
+
+%% array delimiters
+-define(start_array, 16#5B).
+-define(end_array, 16#5D).
+
+%% kv separator
+-define(comma, 16#2C).
+-define(doublequote, 16#22).
+-define(singlequote, 16#27).
+-define(colon, 16#3A).
+
+%% string escape sequences
+-define(rsolidus, 16#5C).
+-define(solidus, 16#2F).
+
+%% math
+-define(zero, 16#30).
+-define(decimalpoint, 16#2E).
+-define(negative, 16#2D).
+-define(positive, 16#2B).
+
+%% comments
+-define(star, 16#2A).
+
+
+%% some useful guards
+-define(is_hex(Symbol),
+ (Symbol >= $a andalso Symbol =< $f) orelse
+ (Symbol >= $A andalso Symbol =< $F) orelse
+ (Symbol >= $0 andalso Symbol =< $9)
+).
+
+-define(is_nonzero(Symbol),
+ Symbol >= $1 andalso Symbol =< $9
+).
+
+
+%% error is a macro so the stack trace shows the error site when possible
+-ifndef(error).
+-define(error(State, Bin, Handler, Acc, Stack, Config),
+ case Config#config.error_handler of
+ false -> erlang:error(badarg);
+ F -> F(Bin, {decoder, State, Handler, Acc, Stack}, jsx_config:config_to_list(Config))
+ end
+).
+-define(error(State, Bin, Handler, Stack, Config),
+ ?error(State, Bin, Handler, null, Stack, Config)
+).
+-endif.
+
+
+incomplete(State, Rest, Handler, Stack, Config = #config{stream=false}) ->
+ ?error(State, Rest, Handler, Stack, Config);
+incomplete(State, Rest, Handler, Stack, Config) ->
+ incomplete(State, Rest, Handler, unused, Stack, Config).
+
+
+incomplete(State, Rest, Handler, Acc, Stack, Config = #config{stream=false}) ->
+ ?error(State, Rest, Handler, Acc, Stack, Config);
+incomplete(State, Rest, Handler, Acc, Stack, Config = #config{incomplete_handler=false}) ->
+ {incomplete, fun(Stream) when is_binary(Stream) ->
+ resume(<<Rest/binary, Stream/binary>>, State, Handler, Acc, Stack, Config);
+ (End) when End == end_stream; End == end_json ->
+ case resume(<<Rest/binary, ?space/utf8>>, State, Handler, Acc, Stack, Config#config{stream=false}) of
+ {incomplete, _} -> ?error(State, Rest, Handler, Acc, Stack, Config);
+ Else -> Else
+ end
+ end
+ };
+incomplete(State, Rest, Handler, Acc, Stack, Config = #config{incomplete_handler=F}) ->
+ F(Rest, {decoder, State, Handler, Acc, Stack}, jsx_config:config_to_list(Config)).
+
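+%% for illustration of the continuation returned above: with the stream flag,
+%%   {incomplete, F} = (jsx:decoder(jsx_to_term, [], [stream]))(<<"[1,">>),
+%%   {incomplete, G} = F(<<"2]">>),
+%%   G(end_stream)
+%% should evaluate to [1, 2]; without stream the same truncated input raises
+%% badarg instead
+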
+
+handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}.
+
+
+start(<<16#ef, 16#bb, 16#bf, Rest/binary>>, Handler, Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+start(<<16#ef, 16#bb>>, Handler, Stack, Config) ->
+ incomplete(start, <<16#ef, 16#bb>>, Handler, Stack, Config);
+start(<<16#ef>>, Handler, Stack, Config) ->
+ incomplete(start, <<16#ef>>, Handler, Stack, Config);
+start(<<>>, Handler, Stack, Config) ->
+ incomplete(start, <<>>, Handler, Stack, Config);
+start(Bin, Handler, Stack, Config) ->
+ value(Bin, Handler, Stack, Config).
+
+
+value(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
+ string(Rest, Handler, Stack, Config);
+value(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+value(<<?start_object, Rest/binary>>, Handler, Stack, Config) ->
+ object(Rest, handle_event(start_object, Handler, Config), [key|Stack], Config);
+value(<<?start_array, Rest/binary>>, Handler, Stack, Config) ->
+ array(Rest, handle_event(start_array, Handler, Config), [array|Stack], Config);
+value(<<$t, $r, $u, $e, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, true}, Handler, Config), Stack, Config);
+value(<<$f, $a, $l, $s, $e, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, false}, Handler, Config), Stack, Config);
+value(<<$n, $u, $l, $l, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, null}, Handler, Config), Stack, Config);
+value(<<?zero, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [?zero], [zero|Stack], Config);
+value(<<$1, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$1], [integer|Stack], Config);
+value(<<$2, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$2], [integer|Stack], Config);
+value(<<$3, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$3], [integer|Stack], Config);
+value(<<$4, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$4], [integer|Stack], Config);
+value(<<$5, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$5], [integer|Stack], Config);
+value(<<$6, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$6], [integer|Stack], Config);
+value(<<$7, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$7], [integer|Stack], Config);
+value(<<$8, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$8], [integer|Stack], Config);
+value(<<$9, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$9], [integer|Stack], Config);
+value(<<?negative, Rest/binary>>, Handler, Stack, Config) ->
+ number(Rest, Handler, [$-], [negative|Stack], Config);
+value(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+value(<<$t, Rest/binary>>, Handler, Stack, Config) ->
+ true(Rest, Handler, Stack, Config);
+value(<<$f, Rest/binary>>, Handler, Stack, Config) ->
+ false(Rest, Handler, Stack, Config);
+value(<<$n, Rest/binary>>, Handler, Stack, Config) ->
+ null(Rest, Handler, Stack, Config);
+value(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+value(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+value(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) ->
+ string(Rest, Handler, [singlequote|Stack], Config);
+value(<<?end_array, _/binary>> = Rest, Handler, Stack, Config=#config{strict_commas=false}) ->
+ maybe_done(Rest, Handler, Stack, Config);
+value(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(value, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+value(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, value, [comment|Stack], Config);
+value(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, value, [multicomment|Stack], Config);
+value(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(value, <<?solidus>>, Handler, Stack, Config);
+value(<<>>, Handler, Stack, Config) ->
+ incomplete(value, <<>>, Handler, Stack, Config);
+value(Bin, Handler, Stack, Config) ->
+ ?error(value, Bin, Handler, Stack, Config).
+
+
+object(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
+ string(Rest, Handler, Stack, Config);
+object(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ object(Rest, Handler, Stack, Config);
+object(<<?end_object, Rest/binary>>, Handler, [key|Stack], Config) ->
+ maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config);
+object(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ object(Rest, Handler, Stack, Config);
+object(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ object(Rest, Handler, Stack, Config);
+object(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ object(Rest, Handler, Stack, Config);
+object(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) ->
+ string(Rest, Handler, [singlequote|Stack], Config);
+object(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(object, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+object(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, object, [comment|Stack], Config);
+object(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, object, [multicomment|Stack], Config);
+object(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(object, <<?solidus>>, Handler, Stack, Config);
+object(<<>>, Handler, Stack, Config) ->
+ incomplete(object, <<>>, Handler, Stack, Config);
+object(Bin, Handler, Stack, Config) ->
+ ?error(object, Bin, Handler, Stack, Config).
+
+
+array(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) ->
+ maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config);
+array(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ array(Rest, Handler, Stack, Config);
+array(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ array(Rest, Handler, Stack, Config);
+array(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ array(Rest, Handler, Stack, Config);
+array(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ array(Rest, Handler, Stack, Config);
+array(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ value(<<?solidus, Rest/binary>>, Handler, Stack, Config);
+array(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, array, [comment|Stack], Config);
+array(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, array, [multicomment|Stack], Config);
+array(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(array, <<?solidus>>, Handler, Stack, Config);
+array(<<>>, Handler, Stack, Config) ->
+ incomplete(array, <<>>, Handler, Stack, Config);
+array(Bin, Handler, Stack, Config) ->
+ value(Bin, Handler, Stack, Config).
+
+
+colon(<<?colon, Rest/binary>>, Handler, [key|Stack], Config) ->
+ value(Rest, Handler, [object|Stack], Config);
+colon(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ colon(Rest, Handler, Stack, Config);
+colon(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ colon(Rest, Handler, Stack, Config);
+colon(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ colon(Rest, Handler, Stack, Config);
+colon(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ colon(Rest, Handler, Stack, Config);
+colon(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(colon, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+colon(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, colon, [comment|Stack], Config);
+colon(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, colon, [multicomment|Stack], Config);
+colon(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(colon, <<?solidus>>, Handler, Stack, Config);
+colon(<<>>, Handler, Stack, Config) ->
+ incomplete(colon, <<>>, Handler, Stack, Config);
+colon(Bin, Handler, Stack, Config) ->
+ ?error(colon, Bin, Handler, Stack, Config).
+
+
+key(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
+ string(Rest, Handler, Stack, Config);
+key(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ key(Rest, Handler, Stack, Config);
+key(<<?end_object, Rest/binary>>, Handler, [key|Stack], Config=#config{strict_commas=false}) ->
+ maybe_done(<<?end_object, Rest/binary>>, Handler, [object|Stack], Config);
+key(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ key(Rest, Handler, Stack, Config);
+key(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ key(Rest, Handler, Stack, Config);
+key(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ key(Rest, Handler, Stack, Config);
+key(<<?singlequote, Rest/binary>>, Handler, Stack, Config=#config{strict_single_quotes=false}) ->
+ string(Rest, Handler, [singlequote|Stack], Config);
+key(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(key, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+key(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, key, [comment|Stack], Config);
+key(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, key, [multicomment|Stack], Config);
+key(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(key, <<?solidus>>, Handler, Stack, Config);
+key(<<>>, Handler, Stack, Config) ->
+ incomplete(key, <<>>, Handler, Stack, Config);
+key(Bin, Handler, Stack, Config) ->
+ ?error(key, Bin, Handler, Stack, Config).
+
+
+%% note that if you encounter an error from string and you can't find the clause that
+%% caused it here, it might be in unescape below
+string(Bin, Handler, Stack, Config) ->
+ string(Bin, Handler, [], Stack, Config).
+
+
+string(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ doublequote(Rest, Handler, Acc, Stack, Config);
+string(<<?singlequote, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ singlequote(Rest, Handler, Acc, Stack, Config);
+string(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace(?solidus, Config)], Stack, Config);
+string(<<?rsolidus/utf8, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ unescape(Rest, Handler, Acc, Stack, Config);
+%% TODO this is pretty gross and i don't like it
+string(<<X/utf8, Rest/binary>> = Bin, Handler, Acc, Stack, Config=#config{uescape=true}) ->
+ case X of
+ X when X < 16#80 -> count(Bin, Handler, Acc, Stack, Config);
+ X -> string(Rest, Handler, [Acc, json_escape_sequence(X)], Stack, Config)
+ end;
+%% u+2028
+string(<<226, 128, 168, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace(16#2028, Config)], Stack, Config);
+%% u+2029
+string(<<226, 128, 169, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace(16#2029, Config)], Stack, Config);
+string(<<_/utf8, _/binary>> = Bin, Handler, Acc, Stack, Config) ->
+ count(Bin, Handler, Acc, Stack, Config);
+%% necessary for bytes that are badly formed utf8 that won't match in `count`
+string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) ->
+ string(Rest, Handler, [Acc, X], Stack, Config);
+%% u+fffe and u+ffff for R14BXX (subsequent runtimes will happily match with /utf8)
+string(<<239, 191, 190, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, <<16#fffe/utf8>>], Stack, Config);
+string(<<239, 191, 191, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, <<16#ffff/utf8>>], Stack, Config);
+string(<<>>, Handler, Acc, Stack, Config) ->
+ incomplete(string, <<>>, Handler, Acc, Stack, Config);
+%% partial utf8 codepoints
+string(<<X>>, Handler, Acc, Stack, Config) when X >= 2#11000000 ->
+ incomplete(string, <<X>>, Handler, Acc, Stack, Config);
+string(<<X, Y>>, Handler, Acc, Stack, Config) when X >= 2#11100000, Y >= 2#10000000 ->
+ incomplete(string, <<X, Y>>, Handler, Acc, Stack, Config);
+string(<<X, Y, Z>>, Handler, Acc, Stack, Config)
+ when X >= 2#11100000, Y >= 2#10000000, Z >= 2#10000000 ->
+ incomplete(string, <<X, Y, Z>>, Handler, Acc, Stack, Config);
+%% surrogates
+string(<<237, X, _, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
+ when X >= 160 ->
+ string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config);
+%% overlong encodings and missing continuations of a 2 byte sequence
+string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
+ when X >= 192, X =< 223 ->
+ strip_continuations(Rest, Handler, Acc, Stack, Config, 1);
+%% overlong encodings and missing continuations of a 3 byte sequence
+string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
+ when X >= 224, X =< 239 ->
+ strip_continuations(Rest, Handler, Acc, Stack, Config, 2);
+%% overlong encodings and missing continuations of a 4 byte sequence
+string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
+ when X >= 240, X =< 247 ->
+ strip_continuations(Rest, Handler, Acc, Stack, Config, 3);
+%% incompletes and unexpected bytes, including orphan continuations
+string(<<_, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) ->
+ string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config);
+string(Bin, Handler, Acc, Stack, Config) -> ?error(string, Bin, Handler, Acc, Stack, Config).
+
+
+count(Bin, Handler, Acc, Stack, Config) ->
+ Size = count(Bin, 0, Config),
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ string(Rest, Handler, [Acc, Clean], Stack, Config).
+
+
+%% explicitly whitelist ascii set for faster parsing. really? really. someone should
+%% submit a patch that unrolls simple guards
+count(<<0, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<1, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<2, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<3, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<4, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<5, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<6, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<7, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<8, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<9, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<10, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<11, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<12, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<13, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<14, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<15, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<16, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<17, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<18, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<19, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<20, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<21, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<22, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<23, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<24, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<25, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<26, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<27, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<28, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<29, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<30, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<31, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<32, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<33, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<?doublequote, _/binary>>, N, _) -> N;
+count(<<35, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<36, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<37, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<38, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<?singlequote, _/binary>>, N, _) -> N;
+count(<<40, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<41, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<42, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<43, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<44, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<45, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<46, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<?solidus, _/binary>>, N, _) -> N;
+count(<<48, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<49, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<50, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<51, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<52, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<53, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<54, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<55, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<56, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<57, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<58, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<59, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<60, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<61, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<62, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<63, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<64, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<65, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<66, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<67, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<68, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<69, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<70, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<71, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<72, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<73, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<74, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<75, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<76, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<77, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<78, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<79, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<80, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<81, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<82, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<83, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<84, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<85, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<86, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<87, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<88, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<89, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<90, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<91, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<?rsolidus, _/binary>>, N, _) -> N;
+count(<<93, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<94, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<95, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<96, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<97, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<98, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<99, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<100, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<101, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<102, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<103, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<104, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<105, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<106, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<107, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<108, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<109, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<110, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<111, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<112, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<113, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<114, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<115, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<116, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<117, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<118, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<119, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<120, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<121, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<122, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<123, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<124, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<125, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<126, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<127, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<_, Rest/binary>>, N, Config=#config{dirty_strings=true}) ->
+ count(Rest, N + 1, Config);
+count(<<_/utf8, _/binary>>, N, #config{uescape=true}) -> N;
+count(<<X/utf8, Rest/binary>>, N, Config) ->
+ case X of
+ X when X < 16#800 -> count(Rest, N + 2, Config);
+ %% jsonp escaping
+ 16#2028 -> N;
+ 16#2029 -> N;
+ X when X < 16#10000 -> count(Rest, N + 3, Config);
+ _ -> count(Rest, N + 4, Config)
+ end;
+count(_, N, _) -> N.
+
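+%% as a rough illustration of count/3 above: it returns the length in bytes of
+%% the longest prefix that can be copied into the accumulator untouched,
+%% stopping at anything that needs special handling, so
+%% count(<<"abc\"def">>, 0, #config{}) should return 3 because the doublequote
+%% ends the plain run
+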
+
+doublequote(Rest, Handler, Acc, [key|_] = Stack, Config) ->
+ colon(Rest, handle_event({key, iolist_to_binary(Acc)}, Handler, Config), Stack, Config);
+doublequote(Rest, Handler, Acc, [singlequote|_] = Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace(?doublequote, Config)], Stack, Config);
+doublequote(<<>>, Handler, Acc, [singlequote|_] = Stack, Config) ->
+ incomplete(string, <<?doublequote>>, Handler, Acc, Stack, Config);
+doublequote(Rest, Handler, Acc, Stack, Config) ->
+ maybe_done(Rest, handle_event({string, iolist_to_binary(Acc)}, Handler, Config), Stack, Config).
+
+
+singlequote(Rest, Handler, Acc, [singlequote, key|Stack], Config) ->
+ colon(Rest, handle_event({key, iolist_to_binary(Acc)}, Handler, Config), [key|Stack], Config);
+singlequote(Rest, Handler, Acc, [singlequote|Stack], Config) ->
+ maybe_done(Rest, handle_event({string, iolist_to_binary(Acc)}, Handler, Config), Stack, Config);
+singlequote(Rest, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, ?singlequote], Stack, Config).
+
+
+%% strips continuation bytes after bad utf bytes, guards against both too short
+%% and overlong sequences. N is the maximum number of bytes to strip
+strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, 0) ->
+ string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config);
+strip_continuations(<<X, Rest/binary>>, Handler, Acc, Stack, Config, N) when X >= 128, X =< 191 ->
+ strip_continuations(Rest, Handler, Acc, Stack, Config, N - 1);
+%% if end of input is reached before the max number of continuations has been
+%% stripped, magic bytes are reinserted into the stream that get us back to
+%% the same state without complicated machinery
+strip_continuations(<<>>, Handler, Acc, Stack, Config, N) ->
+ case N of
+ 1 -> incomplete(string, <<192>>, Handler, Acc, Stack, Config);
+ 2 -> incomplete(string, <<224>>, Handler, Acc, Stack, Config);
+ 3 -> incomplete(string, <<240>>, Handler, Acc, Stack, Config)
+ end;
+%% not a continuation byte, insert a replacement character for sequence thus
+%% far and dispatch back to string
+strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, _) ->
+ string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config).
+
+
+%% this all gets really gross and should probably eventually be folded back
+%% into string, but for now it fakes being part of string on incompletes and
+%% errors
+unescape(<<?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) ->
+ string(<<?rsolidus, Rest/binary>>, Handler, [Acc, <<?rsolidus>>], Stack, Config);
+unescape(<<C, Rest/binary>>, Handler, Acc, Stack, Config=#config{dirty_strings=true}) ->
+ string(Rest, Handler, [Acc, <<?rsolidus, C>>], Stack, Config);
+unescape(<<$b, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\b, Config)], Stack, Config);
+unescape(<<$f, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\f, Config)], Stack, Config);
+unescape(<<$n, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\n, Config)], Stack, Config);
+unescape(<<$r, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\r, Config)], Stack, Config);
+unescape(<<$t, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\t, Config)], Stack, Config);
+unescape(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\", Config)], Stack, Config);
+unescape(<<?singlequote, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_single_quotes=false}) ->
+ string(Rest, Handler, [Acc, <<?singlequote>>], Stack, Config);
+unescape(<<?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($\\, Config)], Stack, Config);
+unescape(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
+ string(Rest, Handler, [Acc, maybe_replace($/, Config)], Stack, Config);
+unescape(<<$u, F, A, B, C, ?rsolidus, $u, G, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config)
+ when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B),
+ (X == $c orelse X == $d orelse X == $e orelse X == $f orelse X == $C orelse X == $D orelse X == $E orelse X == $F),
+ (F == $d orelse F == $D),
+ (G == $d orelse G == $D),
+ ?is_hex(B), ?is_hex(C), ?is_hex(Y), ?is_hex(Z)
+ ->
+ High = erlang:list_to_integer([$d, A, B, C], 16),
+ Low = erlang:list_to_integer([$d, X, Y, Z], 16),
+ Codepoint = (High - 16#d800) * 16#400 + (Low - 16#dc00) + 16#10000,
+ string(Rest, Handler, [Acc, <<Codepoint/utf8>>], Stack, Config);
+unescape(<<$u, F, A, B, C, ?rsolidus, $u, W, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config)
+ when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B),
+ (F == $d orelse F == $D),
+ ?is_hex(B), ?is_hex(C), ?is_hex(W), ?is_hex(X), ?is_hex(Y), ?is_hex(Z)
+ ->
+ case Config#config.strict_utf8 of
+ true -> ?error(<<$u, $d, A, B, C, ?rsolidus, $u, W, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config);
+ false -> string(Rest, Handler, [Acc, <<16#fffd/utf8>>, <<16#fffd/utf8>>], Stack, Config)
+ end;
+unescape(<<$u, F, A, B, C, ?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config)
+ when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B),
+ (F == $d orelse F == $D),
+ ?is_hex(B), ?is_hex(C)
+ ->
+ incomplete(string, <<?rsolidus, $u, $d, A, B, C, ?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config);
+unescape(<<$u, F, A, B, C>>, Handler, Acc, Stack, Config)
+ when (A == $8 orelse A == $9 orelse A == $a orelse A == $b orelse A == $A orelse A == $B),
+ (F == $d orelse F == $D),
+ ?is_hex(B), ?is_hex(C)
+ ->
+ incomplete(string, <<?rsolidus, $u, $d, A, B, C>>, Handler, Acc, Stack, Config);
+unescape(<<$u, A, B, C, D, Rest/binary>>, Handler, Acc, Stack, Config)
+ when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
+ case erlang:list_to_integer([A, B, C, D], 16) of
+ Codepoint when Codepoint < 16#d800; Codepoint > 16#dfff ->
+ string(Rest, Handler, [Acc, maybe_replace(Codepoint, Config)], Stack, Config);
+ _ when Config#config.strict_utf8 ->
+ ?error(string, <<?rsolidus, $u, A, B, C, D, Rest/binary>>, Handler, Acc, Stack, Config);
+ _ -> string(Rest, Handler, [Acc, <<16#fffd/utf8>>], Stack, Config)
+ end;
+unescape(Bin, Handler, Acc, Stack, Config) ->
+ case is_partial_escape(Bin) of
+ true -> incomplete(string, <<?rsolidus/utf8, Bin/binary>>, Handler, Acc, Stack, Config);
+ false -> case Config#config.strict_escapes of
+ true -> ?error(string, <<?rsolidus, Bin/binary>>, Handler, Acc, Stack, Config);
+ false -> string(Bin, Handler, [Acc, <<?rsolidus>>], Stack, Config)
+ end
+ end.
+
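+%% worked example for the surrogate pair clause above: the escape sequence
+%% \ud83d\ude00 gives High = 16#d83d and Low = 16#de00, so
+%%   (16#d83d - 16#d800) * 16#400 + (16#de00 - 16#dc00) + 16#10000 = 16#1f600
+%% and the single codepoint U+1F600 is emitted in place of the pair
+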
+
+is_partial_escape(<<$u, A, B, C>>) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> true;
+is_partial_escape(<<$u, A, B>>) when ?is_hex(A), ?is_hex(B) -> true;
+is_partial_escape(<<$u, A>>) when ?is_hex(A) -> true;
+is_partial_escape(<<$u>>) -> true;
+is_partial_escape(<<>>) -> true;
+is_partial_escape(_) -> false.
+
+
+maybe_replace(C, #config{dirty_strings=true}) -> <<C>>;
+maybe_replace($\b, #config{escaped_strings=true}) -> <<$\\, $b>>;
+maybe_replace($\t, #config{escaped_strings=true}) -> <<$\\, $t>>;
+maybe_replace($\n, #config{escaped_strings=true}) -> <<$\\, $n>>;
+maybe_replace($\f, #config{escaped_strings=true}) -> <<$\\, $f>>;
+maybe_replace($\r, #config{escaped_strings=true}) -> <<$\\, $r>>;
+maybe_replace($\", #config{escaped_strings=true}) -> <<$\\, $\">>;
+maybe_replace($/, Config=#config{escaped_strings=true}) ->
+ case Config#config.escaped_forward_slashes of
+ true -> <<$\\, $/>>
+ ; false -> <<$/>>
+ end;
+maybe_replace($\\, #config{escaped_strings=true}) -> <<$\\, $\\>>;
+maybe_replace(X, Config=#config{escaped_strings=true}) when X == 16#2028; X == 16#2029 ->
+ case Config#config.unescaped_jsonp of
+ true -> <<X/utf8>>
+ ; false -> json_escape_sequence(X)
+ end;
+maybe_replace(X, #config{escaped_strings=true}) when X < 32 ->
+ json_escape_sequence(X);
+maybe_replace(X, _Config) -> <<X/utf8>>.
+
+
+%% convert a codepoint to its \uXXXX equivalent
+json_escape_sequence(X) when X < 65536 ->
+ <<A:4, B:4, C:4, D:4>> = <<X:16>>,
+ <<$\\, $u, (to_hex(A)), (to_hex(B)), (to_hex(C)), (to_hex(D))>>;
+json_escape_sequence(X) ->
+ Adjusted = X - 16#10000,
+ <<A:10, B:10>> = <<Adjusted:20>>,
+ [json_escape_sequence(A + 16#d800), json_escape_sequence(B + 16#dc00)].
+
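+%% for illustration: json_escape_sequence(16#2028) should produce <<"\\u2028">>,
+%% while a codepoint above 16#ffff is split into a surrogate pair, so
+%% json_escape_sequence(16#1f600) should produce an iolist equivalent to
+%% "\\ud83d\\ude00"
+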
+
+%% ascii $0 is 48, $1 is 49, etc, so X + 48 maps 0..9 to their ascii digits
+to_hex(10) -> $a;
+to_hex(11) -> $b;
+to_hex(12) -> $c;
+to_hex(13) -> $d;
+to_hex(14) -> $e;
+to_hex(15) -> $f;
+to_hex(X) -> X + 48.
+
+
+number(<<$e, Rest/binary>>, Handler, Acc, [integer|Stack], Config) ->
+ number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config);
+number(<<$E, Rest/binary>>, Handler, Acc, [integer|Stack], Config) ->
+ number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config);
+number(<<$e, Rest/binary>>, Handler, Acc, [zero|Stack], Config) ->
+ number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config);
+number(<<$E, Rest/binary>>, Handler, Acc, [zero|Stack], Config) ->
+ number(Rest, Handler, [Acc, $., $0, $e], [e|Stack], Config);
+number(<<>>, Handler, Acc, [State|Stack], Config=#config{stream=false}) ->
+ NumType = case State of
+ zero -> integer;
+ integer -> integer;
+ decimal -> float;
+ exp -> float
+ end,
+ finish_number(<<>>, Handler, {NumType, iolist_to_binary(Acc)}, Stack, Config);
+number(<<>>, Handler, Acc, Stack, Config) ->
+ incomplete(number, <<>>, Handler, Acc, Stack, Config);
+number(Bin, Handler, Acc, [State|Stack], Config) ->
+ Counted = case State of
+ zero -> zero(Bin, 0);
+ integer -> integer(Bin, 0);
+ negative -> negative(Bin, 0);
+ initialdecimal -> initialdecimal(Bin, 0);
+ decimal -> decimal(Bin, 0);
+ e -> e(Bin, 0);
+ ex -> ex(Bin, 0);
+ exp -> exp(Bin, 0)
+ end,
+ case Counted of
+ {finish_integer, Size} ->
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ finish_number(Rest, Handler, {integer, iolist_to_binary([Acc, Clean])}, Stack, Config);
+ {finish_float, Size} ->
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ finish_number(Rest, Handler, {float, iolist_to_binary([Acc, Clean])}, Stack, Config);
+ {error, Size} ->
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ ?error(number, Rest, Handler, [Acc, Clean], Stack, Config);
+ {NewState, Size} ->
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ number(Rest, Handler, [Acc, Clean], [NewState|Stack], Config)
+ end.
+
+
+zero(<<?decimalpoint, Rest/binary>>, N) -> initialdecimal(Rest, N + 1);
+zero(<<$e, _/binary>>, N) -> {integer, N};
+zero(<<$E, _/binary>>, N) -> {integer, N};
+zero(<<>>, N) -> {zero, N};
+zero(_, N) -> {finish_integer, N}.
+
+
+integer(<<$0, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$1, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$2, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$3, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$4, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$5, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$6, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$7, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$8, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<$9, Rest/binary>>, N) -> integer(Rest, N + 1);
+integer(<<?decimalpoint, Rest/binary>>, N) -> initialdecimal(Rest, N + 1);
+integer(<<$e, _/binary>>, N) -> {integer, N};
+integer(<<$E, _/binary>>, N) -> {integer, N};
+integer(<<>>, N) -> {integer, N};
+integer(_, N) -> {finish_integer, N}.
+
+
+negative(<<$0, Rest/binary>>, N) -> zero(Rest, N + 1);
+negative(<<$1, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$2, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$3, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$4, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$5, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$6, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$7, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$8, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<$9, Rest/binary>>, N) -> integer(Rest, N + 1);
+negative(<<>>, N) -> {negative, N};
+negative(_, N) -> {error, N}.
+
+
+initialdecimal(<<$0, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$1, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$2, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$3, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$4, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$5, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$6, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$7, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$8, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<$9, Rest/binary>>, N) -> decimal(Rest, N + 1);
+initialdecimal(<<>>, N) -> {initialdecimal, N};
+initialdecimal(_, N) -> {error, N}.
+
+
+decimal(<<$0, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$1, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$2, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$3, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$4, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$5, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$6, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$7, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$8, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$9, Rest/binary>>, N) -> decimal(Rest, N + 1);
+decimal(<<$e, Rest/binary>>, N) -> e(Rest, N + 1);
+decimal(<<$E, Rest/binary>>, N) -> e(Rest, N + 1);
+decimal(<<>>, N) -> {decimal, N};
+decimal(_, N) -> {finish_float, N}.
+
+
+e(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1);
+e(<<?positive, Rest/binary>>, N) -> ex(Rest, N + 1);
+e(<<?negative, Rest/binary>>, N) -> ex(Rest, N + 1);
+e(<<>>, N) -> {e, N};
+e(_, N) -> {error, N}.
+
+
+ex(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1);
+ex(<<>>, N) -> {ex, N};
+ex(_, N) -> {error, N}.
+
+
+exp(<<$0, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$1, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$2, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$3, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$4, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$5, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$6, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$7, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$8, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<$9, Rest/binary>>, N) -> exp(Rest, N + 1);
+exp(<<>>, N) -> {exp, N};
+exp(_, N) -> {finish_float, N}.
+
+
+finish_number(Rest, Handler, Acc, Stack, Config) ->
+ maybe_done(Rest, handle_event(format_number(Acc), Handler, Config), Stack, Config).
+
+
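+%% note: the no_binary_to_whatever define is presumably set by rebar.config.script
+%% for OTP releases that predate binary_to_integer/1 and binary_to_float/1 (added
+%% in R16B); the -else. branch below converts via lists instead.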
+-ifndef(no_binary_to_whatever).
+format_number({integer, Acc}) -> {integer, binary_to_integer(Acc)};
+format_number({float, Acc}) -> {float, binary_to_float(Acc)}.
+-else.
+format_number({integer, Acc}) -> {integer, list_to_integer(unicode:characters_to_list(Acc))};
+format_number({float, Acc}) -> {float, list_to_float(unicode:characters_to_list(Acc))}.
+-endif.
+
+
+true(<<$r, $u, $e, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, true}, Handler, Config), Stack, Config);
+true(<<$r, $u>>, Handler, Stack, Config) ->
+ incomplete(true, <<$r, $u>>, Handler, Stack, Config);
+true(<<$r>>, Handler, Stack, Config) ->
+ incomplete(true, <<$r>>, Handler, Stack, Config);
+true(<<>>, Handler, Stack, Config) ->
+ incomplete(true, <<>>, Handler, Stack, Config);
+true(Bin, Handler, Stack, Config) ->
+ ?error(true, Bin, Handler, Stack, Config).
+
+
+false(<<$a, $l, $s, $e, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, false}, Handler, Config), Stack, Config);
+false(<<$a, $l, $s>>, Handler, Stack, Config) ->
+ incomplete(false, <<$a, $l, $s>>, Handler, Stack, Config);
+false(<<$a, $l>>, Handler, Stack, Config) ->
+ incomplete(false, <<$a, $l>>, Handler, Stack, Config);
+false(<<$a>>, Handler, Stack, Config) ->
+ incomplete(false, <<$a>>, Handler, Stack, Config);
+false(<<>>, Handler, Stack, Config) ->
+ incomplete(false, <<>>, Handler, Stack, Config);
+false(Bin, Handler, Stack, Config) ->
+ ?error(false, Bin, Handler, Stack, Config).
+
+
+null(<<$u, $l, $l, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, handle_event({literal, null}, Handler, Config), Stack, Config);
+null(<<$u, $l>>, Handler, Stack, Config) ->
+ incomplete(null, <<$u, $l>>, Handler, Stack, Config);
+null(<<$u>>, Handler, Stack, Config) ->
+ incomplete(null, <<$u>>, Handler, Stack, Config);
+null(<<>>, Handler, Stack, Config) ->
+ incomplete(null, <<>>, Handler, Stack, Config);
+null(Bin, Handler, Stack, Config) ->
+ ?error(null, Bin, Handler, Stack, Config).
+
+
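+%% comment/5 consumes a // comment up to the next newline, or a /* */ comment
+%% (which may nest, tracked via multicomment entries on the stack), and then
+%% resumes the state it interrupted.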
+comment(<<?newline, Rest/binary>>, Handler, Resume, [comment|Stack], Config) ->
+ resume(Rest, Resume, Handler, unused, Stack, Config);
+comment(<<?solidus, ?star, Rest/binary>>, Handler, Resume, Stack, Config) ->
+ comment(Rest, Handler, Resume, [multicomment|Stack], Config);
+comment(<<?solidus>>, Handler, Resume, [multicomment|_] = Stack, Config) ->
+ incomplete(comment, <<?solidus>>, Handler, Resume, Stack, Config);
+comment(<<?star, ?solidus, Rest/binary>>, Handler, Resume, [multicomment|Stack], Config) ->
+ case Stack of
+ [multicomment|_] -> comment(Rest, Handler, Resume, Stack, Config);
+ _ -> resume(Rest, Resume, Handler, unused, Stack, Config)
+ end;
+comment(<<?star>>, Handler, Resume, [multicomment|_] = Stack, Config) ->
+ incomplete(comment, <<?star>>, Handler, Resume, Stack, Config);
+comment(<<_/utf8, Rest/binary>>, Handler, Resume, Stack, Config) ->
+ comment(Rest, Handler, Resume, Stack, Config);
+comment(<<_, Rest/binary>>, Handler, Resume, Stack, Config=#config{strict_utf8=false}) ->
+ comment(Rest, Handler, Resume, Stack, Config);
+comment(<<>>, Handler, done, [Comment], Config=#config{stream=false})
+ when Comment == comment; Comment == multicomment ->
+ resume(<<>>, done, Handler, unused, [], Config);
+comment(<<>>, Handler, Resume, Stack, Config) ->
+ incomplete(comment, <<>>, Handler, Resume, Stack, Config);
+comment(Bin, Handler, Resume, Stack, Config) ->
+ ?error(comment, Bin, Handler, Resume, Stack, Config).
+
+
+maybe_done(<<Rest/binary>>, Handler, [], Config) ->
+ done(Rest, handle_event(end_json, Handler, Config), [], Config);
+maybe_done(<<?space, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, Handler, Stack, Config);
+maybe_done(<<?end_object, Rest/binary>>, Handler, [object|Stack], Config) ->
+ maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config);
+maybe_done(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) ->
+ maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config);
+maybe_done(<<?comma, Rest/binary>>, Handler, [object|Stack], Config) ->
+ key(Rest, Handler, [key|Stack], Config);
+maybe_done(<<?comma, Rest/binary>>, Handler, [array|_] = Stack, Config) ->
+ value(Rest, Handler, Stack, Config);
+maybe_done(<<?newline, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, Handler, Stack, Config);
+maybe_done(<<?tab, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, Handler, Stack, Config);
+maybe_done(<<?cr, Rest/binary>>, Handler, Stack, Config) ->
+ maybe_done(Rest, Handler, Stack, Config);
+maybe_done(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(maybe_done, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+maybe_done(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, maybe_done, [comment|Stack], Config);
+maybe_done(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, maybe_done, [multicomment|Stack], Config);
+maybe_done(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(maybe_done, <<?solidus>>, Handler, Stack, Config);
+maybe_done(<<>>, Handler, Stack, Config) when length(Stack) > 0 ->
+ incomplete(maybe_done, <<>>, Handler, Stack, Config);
+maybe_done(Bin, Handler, Stack, Config) ->
+ ?error(maybe_done, Bin, Handler, Stack, Config).
+
+
+done(<<?space, Rest/binary>>, Handler, [], Config) ->
+ done(Rest, Handler, [], Config);
+done(<<?newline, Rest/binary>>, Handler, [], Config) ->
+ done(Rest, Handler, [], Config);
+done(<<?tab, Rest/binary>>, Handler, [], Config) ->
+ done(Rest, Handler, [], Config);
+done(<<?cr, Rest/binary>>, Handler, [], Config) ->
+ done(Rest, Handler, [], Config);
+done(<<?solidus, Rest/binary>>, Handler, Stack, Config=#config{strict_comments=true}) ->
+ ?error(done, <<?solidus, Rest/binary>>, Handler, Stack, Config);
+done(<<?solidus, ?solidus, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, done, [comment|Stack], Config);
+done(<<?solidus, ?star, Rest/binary>>, Handler, Stack, Config) ->
+ comment(Rest, Handler, done, [multicomment|Stack], Config);
+done(<<?solidus>>, Handler, Stack, Config) ->
+ incomplete(done, <<?solidus>>, Handler, Stack, Config);
+done(Bin, {_Handler, State}, _Stack, #config{return_tail=true}) ->
+    {with_tail, State, Bin};
+done(<<>>, {Handler, State}, [], Config=#config{stream=true}) ->
+ incomplete(done, <<>>, {Handler, State}, [], Config);
+done(<<>>, {_Handler, State}, [], _Config) -> State;
+done(Bin, {Handler, State}, _Stack, Config=#config{multi_term=true}) ->
+ value(Bin, {Handler, Handler:reset(State)}, [], Config);
+done(Bin, Handler, Stack, Config) -> ?error(done, Bin, Handler, Stack, Config).
+
+
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+json_to_bytes(JSON) -> json_to_bytes(JSON, []).
+
+json_to_bytes(<<>>, Acc) -> [<<>>] ++ lists:reverse(Acc);
+json_to_bytes(<<X, Rest/binary>>, Acc) -> json_to_bytes(Rest, [<<X>>] ++ Acc).
+
+
+decode(JSON) -> decode(JSON, []).
+decode(JSON, Config) -> (decoder(jsx, [], Config))(JSON).
+
+
+incremental_decode(JSON) -> incremental_decode(JSON, []).
+incremental_decode(JSON, Config) ->
+ Final = lists:foldl(
+ fun(Byte, Decoder) -> {incomplete, F} = Decoder(Byte), F end,
+ decoder(jsx, [], [stream] ++ Config),
+ json_to_bytes(JSON)
+ ),
+ Final(end_stream).
+
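+%% decode/2 runs the whole binary through a decoder in one call, while
+%% incremental_decode/2 feeds the same binary one byte at a time through a
+%% streaming decoder and then closes the stream, so every case below is
+%% exercised against both the one-shot and the incremental path.
+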
+
+%% all these numbers have different representations in erlang than in javascript
+%% and, unlike most integers/floats, do not round-trip
+special_number_test_() ->
+ Cases = [
+ % {title, test form, json, opt flags}
+ {"-0", [{integer, 0}, end_json], <<"-0">>},
+ {"-0.0", [{float, 0.0}, end_json], <<"-0.0">>},
+ {"0e0", [{float, 0.0}, end_json], <<"0e0">>},
+ {"0e4", [{float, 0.0}, end_json], <<"0e4">>},
+ {"1e0", [{float, 1.0}, end_json], <<"1e0">>},
+ {"-1e0", [{float, -1.0}, end_json], <<"-1e0">>},
+ {"-0e0", [{float, -0.0}, end_json], <<"-0e0">>},
+ {"1e4", [{float, 1.0e4}, end_json], <<"1e4">>},
+ {"number terminated by whitespace",
+ [start_array, {integer, 1}, end_array, end_json],
+ <<"[ 1 ]">>
+ },
+ {"number terminated by comma",
+ [start_array, {integer, 1}, {integer, 1}, end_array, end_json],
+ <<"[ 1, 1 ]">>
+ },
+ {"number terminated by comma in object",
+ [start_object, {key, <<"x">>}, {integer, 1}, {key, <<"y">>}, {integer, 1}, end_object, end_json],
+ <<"{\"x\": 1, \"y\": 1}">>
+ }
+ ],
+ [{Title, ?_assertEqual(Events, decode(JSON))}
+ || {Title, Events, JSON} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON))}
+ || {Title, Events, JSON} <- Cases
+ ].
+
+
+comments_test_() ->
+ Cases = [
+ % {title, test form, json, opt flags}
+ {"preceeding // comment",
+ [start_array, end_array, end_json],
+ <<"// comment ", ?newline, "[]">>
+ },
+ {"preceeding /**/ comment",
+ [start_array, end_array, end_json],
+ <<"/* comment */[]">>
+ },
+ {"trailing // comment",
+ [start_array, end_array, end_json],
+ <<"[]// comment", ?newline>>
+ },
+ {"trailing // comment (no newline)",
+ [start_array, end_array, end_json],
+ <<"[]// comment">>
+ },
+ {"trailing /**/ comment",
+ [start_array, end_array, end_json],
+ <<"[] /* comment */">>
+ },
+ {"// comment inside array",
+ [start_array, end_array, end_json],
+ <<"[ // comment", ?newline, "]">>
+ },
+ {"/**/ comment inside array",
+ [start_array, end_array, end_json],
+ <<"[ /* comment */ ]">>
+ },
+ {"// comment at beginning of array",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ // comment", ?newline, "true", ?newline, "]">>
+ },
+ {"/**/ comment at beginning of array",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* comment */ true ]">>
+ },
+ {"// comment at end of array",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ true // comment", ?newline, "]">>
+ },
+ {"/**/ comment at end of array",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ true /* comment */ ]">>
+ },
+ {"// comment midarray (post comma)",
+ [start_array, {literal, true}, {literal, false}, end_array, end_json],
+ <<"[ true, // comment", ?newline, "false ]">>
+ },
+ {"/**/ comment midarray (post comma)",
+ [start_array, {literal, true}, {literal, false}, end_array, end_json],
+ <<"[ true, /* comment */ false ]">>
+ },
+ {"// comment midarray (pre comma)",
+ [start_array, {literal, true}, {literal, false}, end_array, end_json],
+ <<"[ true// comment", ?newline, ", false ]">>
+ },
+ {"/**/ comment midarray (pre comma)",
+ [start_array, {literal, true}, {literal, false}, end_array, end_json],
+ <<"[ true/* comment */, false ]">>
+ },
+ {"// comment inside object",
+ [start_object, end_object, end_json],
+ <<"{ // comment", ?newline, "}">>
+ },
+ {"/**/ comment inside object",
+ [start_object, end_object, end_json],
+ <<"{ /* comment */ }">>
+ },
+ {"// comment at beginning of object",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ // comment", ?newline, " \"key\": true", ?newline, "}">>
+ },
+ {"/**/ comment at beginning of object",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ /* comment */ \"key\": true }">>
+ },
+ {"// comment at end of object",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\": true // comment", ?newline, "}">>
+ },
+ {"/**/ comment at end of object",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\": true /* comment */ }">>
+ },
+ {"// comment midobject (post comma)",
+ [
+ start_object,
+ {key, <<"x">>},
+ {literal, true},
+ {key, <<"y">>},
+ {literal, false},
+ end_object,
+ end_json
+ ],
+ <<"{ \"x\": true, // comment", ?newline, "\"y\": false }">>
+ },
+ {"/**/ comment midobject (post comma)",
+ [
+ start_object,
+ {key, <<"x">>},
+ {literal, true},
+ {key, <<"y">>},
+ {literal, false},
+ end_object,
+ end_json
+ ],
+ <<"{ \"x\": true, /* comment */", ?newline, "\"y\": false }">>
+ },
+ {"// comment midobject (pre comma)",
+ [
+ start_object,
+ {key, <<"x">>},
+ {literal, true},
+ {key, <<"y">>},
+ {literal, false},
+ end_object,
+ end_json
+ ],
+ <<"{ \"x\": true// comment", ?newline, ", \"y\": false }">>
+ },
+ {"/**/ comment midobject (pre comma)",
+ [
+ start_object,
+ {key, <<"x">>},
+ {literal, true},
+ {key, <<"y">>},
+ {literal, false},
+ end_object,
+ end_json
+ ],
+ <<"{ \"x\": true/* comment */", ?newline, ", \"y\": false }">>
+ },
+ {"// comment precolon",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\" // comment", ?newline, ": true }">>
+ },
+ {"/**/ comment precolon",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\"/* comment */: true }">>
+ },
+ {"// comment postcolon",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\": // comment", ?newline, " true }">>
+ },
+ {"/**/ comment postcolon",
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ <<"{ \"key\":/* comment */ true }">>
+ },
+ {"// comment terminating zero",
+ [start_array, {integer, 0}, end_array, end_json],
+ <<"[ 0// comment", ?newline, "]">>
+ },
+ {"// comment terminating integer",
+ [start_array, {integer, 1}, end_array, end_json],
+ <<"[ 1// comment", ?newline, "]">>
+ },
+ {"// comment terminating float",
+ [start_array, {float, 1.0}, end_array, end_json],
+ <<"[ 1.0// comment", ?newline, "]">>
+ },
+ {"// comment terminating exp",
+ [start_array, {float, 1.0e1}, end_array, end_json],
+ <<"[ 1e1// comment", ?newline, "]">>
+ },
+ {"/**/ comment terminating zero",
+ [start_array, {integer, 0}, end_array, end_json],
+ <<"[ 0/* comment */ ]">>
+ },
+ {"/**/ comment terminating integer",
+ [start_array, {integer, 1}, end_array, end_json],
+ <<"[ 1/* comment */ ]">>
+ },
+ {"/**/ comment terminating float",
+ [start_array, {float, 1.0}, end_array, end_json],
+ <<"[ 1.0/* comment */ ]">>
+ },
+ {"/**/ comment terminating exp",
+ [start_array, {float, 1.0e1}, end_array, end_json],
+ <<"[ 1e1/* comment */ ]">>
+ },
+ {"/**/ comment following /**/ comment",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[/* comment *//* comment */true]">>
+ },
+ {"/**/ comment following // comment",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[// comment", ?newline, "/* comment */true]">>
+ },
+ {"// comment following /**/ comment",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[/* comment */// comment", ?newline, "true]">>
+ },
+ {"// comment following // comment",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[// comment", ?newline, "// comment", ?newline, "true]">>
+ },
+ {"/**/ comment inside /**/ comment",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* /* comment */ */ true ]">>
+ },
+ {"/**/ comment with /",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* / */ true ]">>
+ },
+ {"/**/ comment with *",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* * */ true ]">>
+ },
+ {"// comment with badutf",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ // comment ", 16#00c0, " ", ?newline, "true]">>
+ },
+ {"/**/ comment with badutf",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* comment ", 16#00c0, " */ true]">>
+ },
+ {"/**/ comment with badutf preceeded by /",
+ [start_array, {literal, true}, end_array, end_json],
+ <<"[ /* comment /", 16#00c0, " */ true]">>
+ }
+ ],
+ [{Title, ?_assertEqual(Events, decode(JSON))}
+ || {Title, Events, JSON} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON))}
+ || {Title, Events, JSON} <- Cases
+ ] ++
+ % error when `{strict, [comments]}` is present
+ [{Title, ?_assertError(badarg, decode(JSON, [{strict, [comments]}]))}
+ || {Title, _Events, JSON} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertError(
+ badarg,
+ incremental_decode(JSON, [{strict, [comments]}])
+ )} || {Title, _Events, JSON} <- Cases
+ ].
+
+
+no_comments_test_() ->
+ Cases = [
+ {"// comment with badutf",
+ badarg,
+ <<"[ // comment ", 16#00c0, " ", ?newline, "true]">>,
+ [{strict, [utf8]}]
+ },
+ {"/**/ comment with badutf",
+ badarg,
+ <<"[ /* comment ", 16#00c0, " */ true]">>,
+ [{strict, [utf8]}]
+ },
+ {"/**/ comment with badutf preceeded by /",
+ badarg,
+ <<"[ /* comment /", 16#00c0, " */ true]">>,
+ [{strict, [utf8]}]
+ }
+ ],
+ [{Title, ?_assertError(Error, decode(JSON, Config))}
+ || {Title, Error, JSON, Config} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertError(Error, incremental_decode(JSON, Config))}
+ || {Title, Error, JSON, Config} <- Cases
+ ].
+
+
+% exercising the full unicode range takes far too long, so just test the boundaries;
+% excludes characters that may need escaping
+codepoints() ->
+ lists:seq(0, 32) ++
+ [32, 33] ++
+ lists:seq(35, 46) ++
+ lists:seq(48, 91) ++
+ lists:seq(93, 127) ++
+ [16#2027, 16#202a, 16#d7ff, 16#e000] ++
+ lists:seq(16#fdd0, 16#ffff) ++
+ [16#10000, 16#20000, 16#30000, 16#40000, 16#50000] ++
+ [16#60000, 16#70000, 16#80000, 16#90000, 16#a0000, 16#b0000] ++
+ [16#c0000, 16#d0000, 16#e0000, 16#f0000, 16#100000].
+
+
+surrogates() -> lists:seq(16#d800, 16#dfff).
+
+
+%% erlang refuses to encode certain codepoints as utf8, so fake the byte sequences by hand
+to_fake_utf8(N) when N < 16#0080 -> <<34/utf8, N:8, 34/utf8>>;
+to_fake_utf8(N) when N < 16#0800 ->
+ <<0:5, Y:5, X:6>> = <<N:16>>,
+ <<34/utf8, 2#110:3, Y:5, 2#10:2, X:6, 34/utf8>>;
+to_fake_utf8(N) when N < 16#10000 ->
+ <<Z:4, Y:6, X:6>> = <<N:16>>,
+ <<34/utf8, 2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6, 34/utf8>>;
+to_fake_utf8(N) ->
+ <<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>,
+ <<34/utf8, 2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6, 34/utf8>>.
+
+
+clean_string_test_() ->
+ Clean = codepoints(),
+ Dirty = surrogates(),
+ % clean codepoints
+ [{"clean u+" ++ integer_to_list(Codepoint, 16), ?_assertEqual(
+ [{string, <<Codepoint/utf8>>}, end_json],
+ decode(<<34/utf8, Codepoint/utf8, 34/utf8>>)
+ )} || Codepoint <- Clean
+ ] ++
+ % bad codepoints replaced by u+FFFD
+ [{"clean u+" ++ integer_to_list(Codepoint, 16), ?_assertEqual(
+ [{string, <<16#fffd/utf8>>}, end_json],
+ decode(to_fake_utf8(Codepoint))
+ )} || Codepoint <- Dirty
+ ] ++
+ % bad codepoints that cause errors
+ [{"dirty u+" ++ integer_to_list(Codepoint, 16), ?_assertError(
+ badarg,
+ decode(to_fake_utf8(Codepoint), [{strict, [utf8]}])
+ )} || Codepoint <- Dirty
+ ].
+
+
+dirty_string_test_() ->
+ Cases = [
+ {"dirty \\n",
+ [start_array, {string, <<"\\n">>}, end_array, end_json],
+ <<"[\"\\n\"]">>,
+ [dirty_strings]
+ },
+ {"dirty \\uwxyz",
+ [start_array, {string, <<"\\uwxyz">>}, end_array, end_json],
+ <<"[\"\\uwxyz\"]">>,
+ [dirty_strings]
+ },
+ {"dirty \\x23",
+ [start_array, {string, <<"\\x23">>}, end_array, end_json],
+ <<"[\"\\x23\"]">>,
+ [dirty_strings]
+ },
+ {"dirty 0",
+ [start_array, {string, <<0>>}, end_array, end_json],
+ <<"[\"", 0, "\"]">>,
+ [dirty_strings]
+ },
+ {"dirty 0\\\"0",
+ [start_array, {string, <<0, ?rsolidus, ?doublequote, 0>>}, end_array, end_json],
+ <<"[\"", 0, ?rsolidus, ?doublequote, 0, "\"]">>,
+ [dirty_strings]
+ },
+ {"dirty 0\\\\\"0",
+ [start_array, {string, <<0, ?rsolidus, ?rsolidus, ?doublequote, 0>>}, end_array, end_json],
+ <<"[\"", 0, ?rsolidus, ?rsolidus, ?doublequote, 0, "\"]">>,
+ [dirty_strings]
+ },
+ {"dirty 16#d800",
+ [start_array, {string, <<237, 160, 128>>}, end_array, end_json],
+ <<"[\"", 237, 160, 128, "\"]">>,
+ [dirty_strings]
+ },
+ {"dirty /",
+ [start_array, {string, <<$/>>}, end_array, end_json],
+ <<"[\"", $/, "\"]">>,
+ [dirty_strings, escaped_forward_slashes]
+ },
+ {"dirty <<194, 129>>",
+ [start_array, {string, <<194, 129>>}, end_array, end_json],
+ <<"[\"", 194, 129, "\"]">>,
+ [dirty_strings]
+ }
+ ],
+ [{Title, ?_assertEqual(Events, decode(JSON, Config))}
+ || {Title, Events, JSON, Config} <- Cases
+ ] ++
+ % ensure `dirty_strings` and `strict` interact properly
+ [{Title, ?_assertEqual(Events, decode(JSON, Config ++ [strict]))}
+ || {Title, Events, JSON, Config} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(Events, incremental_decode(JSON, Config))}
+ || {Title, Events, JSON, Config} <- Cases
+ ].
+
+
+bad_utf8_test_() ->
+ Cases = [
+ {"orphan continuation byte u+0080", <<16#fffd/utf8>>, <<16#0080>>},
+ {"orphan continuation byte u+00bf", <<16#fffd/utf8>>, <<16#00bf>>},
+ {"2 continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, 2),
+ <<(binary:copy(<<16#0080>>, 2))/binary>>
+ },
+ {"3 continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, 3),
+ <<(binary:copy(<<16#0080>>, 3))/binary>>
+ },
+ {"4 continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, 4),
+ <<(binary:copy(<<16#0080>>, 4))/binary>>
+ },
+ {"5 continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, 5),
+ <<(binary:copy(<<16#0080>>, 5))/binary>>
+ },
+ {"6 continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, 6),
+ <<(binary:copy(<<16#0080>>, 6))/binary>>
+ },
+ {"all continuation bytes",
+ binary:copy(<<16#fffd/utf8>>, length(lists:seq(16#0080, 16#00bf))),
+ <<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>>
+ },
+ {"lonely start byte", <<16#fffd/utf8>>, <<16#00c0>>},
+ {"lonely start bytes (2 byte)",
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ <<16#00c0, 32, 16#00df>>
+ },
+ {"lonely start bytes (3 byte)",
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ <<16#00e0, 32, 16#00ef>>
+ },
+ {"lonely start bytes (4 byte)",
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ <<16#00f0, 32, 16#00f7>>
+ },
+ {"missing continuation byte (3 byte)", <<16#fffd/utf8, 32>>, <<224, 160, 32>>},
+ {"missing continuation byte (4 byte missing one)",
+ <<16#fffd/utf8, 32>>,
+ <<240, 144, 128, 32>>
+ },
+ {"missing continuation byte (4 byte missing two)",
+ <<16#fffd/utf8, 32>>,
+ <<240, 144, 32>>
+ },
+ {"overlong encoding of u+002f (2 byte)",
+ <<16#fffd/utf8, 32>>,
+ <<16#c0, 16#af, 32>>
+ },
+ {"overlong encoding of u+002f (3 byte)",
+ <<16#fffd/utf8, 32>>,
+ <<16#e0, 16#80, 16#af, 32>>
+ },
+ {"overlong encoding of u+002f (4 byte)",
+ <<16#fffd/utf8, 32>>,
+ <<16#f0, 16#80, 16#80, 16#af, 32>>
+ },
+ {"highest overlong 2 byte sequence",
+ <<16#fffd/utf8, 32>>,
+ <<16#c1, 16#bf, 32>>
+ },
+ {"highest overlong 3 byte sequence",
+ <<16#fffd/utf8, 32>>,
+ <<16#e0, 16#9f, 16#bf, 32>>
+ },
+ {"highest overlong 4 byte sequence",
+ <<16#fffd/utf8, 32>>,
+ <<16#f0, 16#8f, 16#bf, 16#bf, 32>>
+ }
+ ],
+ [{Title, ?_assertError(
+ badarg,
+ decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}])
+ )} || {Title, _, JSON} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertError(
+ badarg,
+ incremental_decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}])
+ )} || {Title, _, JSON} <- Cases
+ ] ++
+ [{Title ++ " replaced", ?_assertEqual(
+ [{string, Replacement}, end_json],
+ decode(<<34, JSON/binary, 34>>)
+ )} || {Title, Replacement, JSON} <- Cases
+ ] ++
+ [{Title ++ " replaced (incremental)", ?_assertEqual(
+ [{string, Replacement}, end_json],
+ incremental_decode(<<34, JSON/binary, 34>>)
+ )} || {Title, Replacement, JSON} <- Cases
+ ].
+
+
+unescape_test_() ->
+ Cases = [
+ {"unescape backspace", <<"\b">>, <<"\\b"/utf8>>},
+ {"unescape tab", <<"\t">>, <<"\\t"/utf8>>},
+ {"unescape newline", <<"\n">>, <<"\\n"/utf8>>},
+ {"unescape formfeed", <<"\f">>, <<"\\f"/utf8>>},
+ {"unescape carriage return", <<"\r">>, <<"\\r"/utf8>>},
+ {"unescape quote", <<"\"">>, <<"\\\""/utf8>>},
+ {"unescape solidus", <<"/">>, <<"\\/"/utf8>>},
+ {"unescape reverse solidus", <<"\\">>, <<"\\\\"/utf8>>},
+ {"unescape control", <<0>>, <<"\\u0000"/utf8>>},
+ {"unescape surrogate pair", <<16#10000/utf8>>, <<"\\ud800\\udc00"/utf8>>},
+ {"unescape surrogate pair", <<16#10000/utf8>>, <<"\\uD800\\uDC00"/utf8>>},
+ {"replace bad high surrogate", <<16#fffd/utf8>>, <<"\\udc00"/utf8>>},
+ {"replace bad high surrogate", <<16#fffd/utf8>>, <<"\\uDC00"/utf8>>},
+ {"replace naked high surrogate",
+ <<16#fffd/utf8, "hello world">>,
+ <<"\\ud800hello world"/utf8>>
+ },
+ {"replace naked high surrogate",
+ <<16#fffd/utf8, "hello world">>,
+ <<"\\uD800hello world"/utf8>>
+ },
+ {"replace naked low surrogate",
+ <<16#fffd/utf8, "hello world">>,
+ <<"\\udc00hello world"/utf8>>
+ },
+ {"replace naked low surrogate",
+ <<16#fffd/utf8, "hello world">>,
+ <<"\\uDC00hello world"/utf8>>
+ },
+ {"replace bad surrogate pair", <<16#fffd/utf8, 16#fffd/utf8>>, <<"\\ud800\\u0000">>},
+ {"replace bad surrogate pair", <<16#fffd/utf8, 16#fffd/utf8>>, <<"\\uD800\\u0000">>}
+ ],
+ [{Title, ?_assertEqual([{string, Escaped}, end_json], decode(<<34, JSON/binary, 34>>))}
+ || {Title, Escaped, JSON} <- Cases
+ ] ++
+ [{Title ++ " (incremental)", ?_assertEqual(
+ [{string, Escaped}, end_json],
+ incremental_decode(<<34, JSON/binary, 34>>)
+ )} || {Title, Escaped, JSON} <- Cases
+ ].
+
+
+bad_escaped_surrogate_test_() ->
+ Cases = [
+ {"do not unescape bad high surrogate", <<"\\udc00">>},
+ {"do not unescape naked high surrogate", <<"\\ud800hello world">>},
+ {"do not unescape naked low surrogate", <<"\\udc00hello world">>},
+ {"do not unescape bad surrogate pair", <<"\\ud800\\u0000">>}
+ ],
+ [{Title, ?_assertError(badarg, decode(<<34, JSON/binary, 34>>, [{strict, [utf8]}]))}
+ || {Title, JSON} <- Cases
+ ].
+
+
+escape_test_() ->
+ Cases = [
+ {"backspace", <<"\b">>, <<"\\b">>},
+ {"tab", <<"\t">>, <<"\\t">>},
+ {"newline", <<"\n">>, <<"\\n">>},
+ {"formfeed", <<"\f">>, <<"\\f">>},
+ {"carriage return", <<"\r">>, <<"\\r">>},
+ {"quote", <<"\"">>, <<"\\\"">>},
+ {"backslash", <<"\\">>, <<"\\\\">>},
+ {"control", <<0>>, <<"\\u0000">>}
+ ],
+ [{"escape " ++ Title, ?_assertEqual(
+ [{string, Escaped}, end_json],
+ decode(<<34, Escaped/binary, 34>>, [escaped_strings])
+ )} || {Title, _Unescaped, Escaped} <- Cases
+ ] ++
+ [{"do not escape " ++ Title, ?_assertEqual(
+ [{string, Unescaped}, end_json],
+ decode(<<34, Escaped/binary, 34>>)
+ )} || {Title, Unescaped, Escaped} <- Cases
+ ].
+
+
+special_escape_test_() ->
+ Cases = [
+ {"escape forward slash", <<"\\/">>, <<"/"/utf8>>, [escaped_forward_slashes]},
+ {"do not escape forward slash", <<"/">>, <<"/"/utf8>>, []},
+ {"escape jsonp", <<"\\u2028">>, <<16#2028/utf8>>, []},
+ {"do not escape jsonp", <<16#2028/utf8>>, <<16#2028/utf8>>, [unescaped_jsonp]}
+ ],
+ [{Title, ?_assertEqual(
+ [{string, Expect}, end_json],
+ decode(<<34, Raw/binary, 34>>, [escaped_strings] ++ Config)
+ )} || {Title, Expect, Raw, Config} <- Cases
+ ].
+
+
+uescape_test_() ->
+ [
+ {"\"\\u0080\"", ?_assertEqual(
+ [{string, <<"\\u0080">>}, end_json],
+ decode(<<34, 128/utf8, 34>>, [uescape])
+ )},
+ {"\"\\u8ca8\\u5481\\u3002\\u0091\\u0091\"", ?_assertEqual(
+ [{string, <<"\\u8ca8\\u5481\\u3002\\u0091\\u0091">>}, end_json],
+ decode(
+ <<34,232,178,168,229,146,129,227,128,130,194,145,194,145,34>>,
+ [uescape]
+ )
+ )},
+ {"\"\\ud834\\udd1e\"", ?_assertEqual(
+ [{string, <<"\\ud834\\udd1e">>}, end_json],
+ decode(<<34, 240, 157, 132, 158, 34>>, [uescape])
+ )},
+ {"\"\\ud83d\\ude0a\"", ?_assertEqual(
+ [{string, <<"\\ud83d\\ude0a">>}, end_json],
+ decode(<<34, 240, 159, 152, 138, 34>>, [uescape])
+ )}
+ ].
+
+
+single_quoted_string_test_() ->
+ Cases = [
+ {"single quoted string", [{string, <<"hello world">>}, end_json], <<39, "hello world", 39>>},
+ {"single quoted string with embedded double quotes",
+ [{string, <<"quoth the raven, \"nevermore\"">>}, end_json],
+ <<39, "quoth the raven, \"nevermore\"", 39>>
+ },
+ {"escaped single quote",
+ [{string, <<"quoth the raven, 'nevermore'">>}, end_json],
+ <<39, "quoth the raven, \\'nevermore\\'", 39>>
+ },
+ {"single quoted key",
+ [start_object,
+ {key, <<"key">>}, {string, <<"value">>},
+ {key, <<"another key">>}, {string, <<"another value">>},
+ end_object, end_json],
+ <<"{'key':'value','another key':'another value'}">>
+ }
+ ],
+ [{Title, ?_assertEqual(Expect, decode(Raw, []))} || {Title, Expect, Raw} <- Cases] ++
+ [{Title, ?_assertError(
+ badarg,
+ decode(Raw, [{strict, [single_quotes]}])
+ )} || {Title, _Expect, Raw} <- Cases
+ ].
+
+
+embedded_single_quoted_string_test_() ->
+ [
+ {"string with embedded single quotes", ?_assertEqual(
+ [{string, <<"quoth the raven, 'nevermore'">>}, end_json],
+ decode(<<34, "quoth the raven, 'nevermore'", 34>>, [])
+ )},
+ {"string with embedded single quotes", ?_assertEqual(
+ [{string, <<"quoth the raven, 'nevermore'">>}, end_json],
+ decode(<<34, "quoth the raven, 'nevermore'", 34>>, [{strict, [single_quotes]}])
+ )}
+ ].
+
+
+ignored_bad_escapes_test_() ->
+ [
+ {"ignore unrecognized escape sequence", ?_assertEqual(
+ [{string, <<"\\x25">>}, end_json],
+ decode(<<"\"\\x25\"">>, [])
+ )}
+ ].
+
+
+bom_test_() ->
+ [
+ {"bom", ?_assertEqual(
+ [start_array, end_array, end_json],
+ decode(<<16#ef, 16#bb, 16#bf, "[]"/utf8>>, [])
+ )}
+ ].
+
+
+trailing_comma_test_() ->
+ [
+ {"trailing comma in object", ?_assertEqual(
+ [start_object, {key, <<"key">>}, {literal, true}, end_object, end_json],
+ decode(<<"{\"key\": true,}">>, [])
+ )},
+ {"strict trailing comma in object", ?_assertError(
+ badarg,
+ decode(<<"{\"key\": true,}">>, [{strict, [trailing_commas]}])
+ )},
+ {"two trailing commas in object", ?_assertError(
+ badarg,
+ decode(<<"{\"key\": true,,}">>, [])
+ )},
+ {"comma in empty object", ?_assertError(
+ badarg,
+ decode(<<"{,}">>, [])
+ )},
+ {"trailing comma in list", ?_assertEqual(
+ [start_array, {literal, true}, end_array, end_json],
+ decode(<<"[true,]">>, [])
+ )},
+ {"strict trailing comma in list", ?_assertError(
+ badarg,
+ decode(<<"[true,]">>, [{strict, [trailing_commas]}])
+ )},
+ {"two trailing commas in list", ?_assertError(
+ badarg,
+ decode(<<"[true,,]">>, [])
+ )},
+ {"comma in empty list", ?_assertError(
+ badarg,
+ decode(<<"[,]">>, [])
+ )}
+ ].
+
+
+incomplete_test_() ->
+ [
+ {"stream false", ?_assertError(
+ badarg,
+ decode(<<"{">>)
+ )},
+ {"stream true", ?_assertMatch(
+ {incomplete, _},
+ decode(<<"{">>, [stream])
+ )},
+ {"complete input", ?_assertMatch(
+ {incomplete, _},
+ decode(<<"{}">>, [stream])
+ )}
+ ].
+
+
+error_test_() ->
+ Cases = [
+ {"maybe_bom error", <<16#ef, 0>>},
+ {"definitely_bom error", <<16#ef, 16#bb, 0>>},
+ {"object error", <<"{"/utf8, 0>>},
+ {"colon error", <<"{\"\""/utf8, 0>>},
+ {"key error", <<"{\"\":1,"/utf8, 0>>},
+ {"value error", <<0>>},
+ {"negative error", <<"-"/utf8, 0>>},
+ {"zero error", <<"0"/utf8, 0>>},
+ {"integer error", <<"1"/utf8, 0>>},
+ {"decimal error", <<"1.0"/utf8, 0>>},
+ {"e error", <<"1e"/utf8, 0>>},
+ {"ex error", <<"1e+"/utf8, 0>>},
+ {"exp error", <<"1e1"/utf8, 0>>},
+ {"exp error", <<"1.0e1"/utf8, 0>>},
+ {"exp error", <<"1.e"/utf8>>},
+ {"true error", <<"tru"/utf8, 0>>},
+ {"false error", <<"fals"/utf8, 0>>},
+ {"null error", <<"nul"/utf8, 0>>},
+ {"maybe_done error", <<"[[]"/utf8, 0>>},
+ {"done error", <<"[]"/utf8, 0>>}
+ ],
+ [{Title, ?_assertError(badarg, decode(State))} || {Title, State} <- Cases].
+
+
+custom_incomplete_handler_test_() ->
+ [
+ {"custom incomplete handler", ?_assertError(
+ incomplete,
+ decode(<<>>, [{incomplete_handler, fun(_, _, _) -> erlang:error(incomplete) end}, stream])
+ )}
+ ].
+
+
+return_tail_test_() ->
+ [
+ {"return_tail with tail", ?_assertEqual(
+ {with_tail,[{}],<<"3">>},
+ jsx:decode(<<"{} 3">>, [return_tail])
+ )},
+ {"return_tail without tail", ?_assertEqual(
+ {with_tail,[{}],<<"">>},
+ jsx:decode(<<"{}">>, [return_tail])
+ )},
+ {"return_tail with trimmed whitespace", ?_assertEqual(
+ {with_tail,[{}],<<"">>},
+ jsx:decode(<<"{} ">>, [return_tail])
+ )},
+ {"return_tail and streaming", ?_assertEqual(
+ {with_tail,[{}],<<"3">>},
+ begin
+ {incomplete, F} = jsx:decode(<<"{">>, [return_tail, stream]),
+ F(<<"} 3">>)
+ end
+ )},
+ {"return_tail and streaming", ?_assertEqual(
+ {with_tail,[{}],<<"">>},
+ begin
+                %% With an endless stream of objects the caller never knows when
+                %% to call F(end_stream), so return_tail overrides the conservative
+                %% stream end and end_stream does not need to be called explicitly.
+ {incomplete, F} = jsx:decode(<<"{">>, [return_tail, stream]),
+ F(<<"}">>)
+ end
+ )}
+ ].
+
+-endif.
diff --git a/deps/jsx/src/jsx_encoder.erl b/deps/jsx/src/jsx_encoder.erl
new file mode 100644
index 0000000..e89328c
--- /dev/null
+++ b/deps/jsx/src/jsx_encoder.erl
@@ -0,0 +1,114 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_encoder).
+
+-export([encoder/3, encode/1, encode/2]).
+
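+%% encoder/3 wraps jsx:parser/3: the fun it returns tokenizes Term with encode/2,
+%% appends end_json and hands the resulting token list to the wrapped parser.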
+-spec encoder(Handler::module(), State::any(), Config::list()) -> jsx:encoder().
+
+encoder(Handler, State, Config) ->
+ Parser = jsx:parser(Handler, State, Config),
+ fun(Term) -> Parser(encode(Term) ++ [end_json]) end.
+
+
+-spec encode(Term::any()) -> any().
+
+encode(Term) -> encode(Term, ?MODULE).
+
+
+-spec encode(Term::any(), EntryPoint::module()) -> any().
+
+-ifndef(maps_support).
+encode(Term, EntryPoint) -> encode_(Term, EntryPoint).
+-endif.
+
+-ifdef(maps_support).
+encode(Map, _EntryPoint) when is_map(Map), map_size(Map) < 1 ->
+ [start_object, end_object];
+encode(Term, EntryPoint) when is_map(Term) ->
+ [start_object] ++ unpack(Term, EntryPoint);
+encode(Term, EntryPoint) -> encode_(Term, EntryPoint).
+-endif.
+
+encode_([], _EntryPoint) -> [start_array, end_array];
+encode_([{}], _EntryPoint) -> [start_object, end_object];
+
+%% datetime special case
+encode_([{{_,_,_},{_,_,_}} = DateTime|Rest], EntryPoint) ->
+ [start_array] ++ [DateTime] ++ unhitch(Rest, EntryPoint);
+encode_([{_, _}|_] = Term, EntryPoint) ->
+ [start_object] ++ unzip(Term, EntryPoint);
+encode_(Term, EntryPoint) when is_list(Term) ->
+ [start_array] ++ unhitch(Term, EntryPoint);
+
+encode_(Else, _EntryPoint) -> [Else].
+
+
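+%% unzip/2 emits each proplist key followed by its encoded value and closes the
+%% object; unhitch/2 does the same for plain list elements and closes the array.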
+unzip([{K, V}|Rest], EntryPoint) when is_integer(K); is_binary(K); is_atom(K) ->
+ [K] ++ EntryPoint:encode(V, EntryPoint) ++ unzip(Rest, EntryPoint);
+unzip([], _) -> [end_object].
+
+
+unhitch([V|Rest], EntryPoint) ->
+ EntryPoint:encode(V, EntryPoint) ++ unhitch(Rest, EntryPoint);
+unhitch([], _) -> [end_array].
+
+
+-ifdef(maps_support).
+unpack(Map, EntryPoint) -> unpack(Map, maps:keys(Map), EntryPoint).
+
+unpack(Map, [K|Rest], EntryPoint) when is_integer(K); is_binary(K); is_atom(K) ->
+ [K] ++ EntryPoint:encode(maps:get(K, Map), EntryPoint) ++ unpack(Map, Rest, EntryPoint);
+unpack(_, [], _) -> [end_object].
+-endif.
+
+
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+parser(Term, Opts) -> (jsx:parser(jsx, [], Opts))(Term).
+
+
+error_test_() ->
+ [
+ {"value error", ?_assertError(badarg, parser(self(), []))},
+ {"string error", ?_assertError(badarg, parser(<<239, 191, 191>>, [strict]))}
+ ].
+
+custom_error_handler_test_() ->
+ Error = fun(Term, {_, State, _, _}, _) -> {State, Term} end,
+ [
+ {"value error", ?_assertEqual(
+ {value, [self()]},
+ parser(self(), [{error_handler, Error}])
+ )},
+ {"string error", ?_assertEqual(
+ {value, [{string, <<237, 160, 128>>}]},
+ parser(<<237, 160, 128>>, [{error_handler, Error}, strict])
+ )}
+ ].
+
+-endif.
diff --git a/deps/jsx/src/jsx_parser.erl b/deps/jsx/src/jsx_parser.erl
new file mode 100644
index 0000000..7e6c6cf
--- /dev/null
+++ b/deps/jsx/src/jsx_parser.erl
@@ -0,0 +1,1168 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_parser).
+
+-export([parser/3, resume/5]).
+-export([init/1, handle_event/2]).
+
+
+-spec parser(Handler::module(), State::any(), Config::list()) -> jsx:parser().
+
+parser(Handler, State, Config) ->
+ fun(Tokens) -> value(Tokens, {Handler, Handler:init(State)}, [], jsx_config:parse_config(Config)) end.
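+
+%% a minimal usage sketch (assuming an event-accumulating handler such as the
+%% jsx module used by the test suites): (parser(jsx, [], []))([start_array, 1,
+%% end_array, end_json]) would yield [start_array, {integer, 1}, end_array, end_json].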
+
+
+%% resume allows continuation from interrupted parsing without having to explicitly
+%% export all states
+-spec resume(
+ Rest::jsx:token(),
+ State::atom(),
+ Handler::{atom(), any()},
+ Stack::list(atom()),
+ Config::jsx:config()
+ ) -> jsx:parser() | {incomplete, jsx:parser()}.
+
+resume(Rest, State, Handler, Stack, Config) ->
+ case State of
+ value -> value(Rest, Handler, Stack, Config);
+ object -> object(Rest, Handler, Stack, Config);
+ array -> array(Rest, Handler, Stack, Config);
+ maybe_done -> maybe_done(Rest, Handler, Stack, Config);
+ done -> done(Rest, Handler, Stack, Config)
+ end.
+
+
+-include("jsx_config.hrl").
+
+
+%% error, incomplete and event helpers
+-ifndef(error).
+-define(error(State, Terms, Handler, Stack, Config),
+ case Config#config.error_handler of
+ false -> erlang:error(badarg);
+ F -> F(Terms, {parser, State, Handler, Stack}, jsx_config:config_to_list(Config))
+    end
+).
+-endif.
+
+
+incomplete(State, Handler, Stack, Config=#config{stream=false}) ->
+ ?error(State, [], Handler, Stack, Config);
+incomplete(State, Handler, Stack, Config=#config{incomplete_handler=false}) ->
+ {incomplete, fun(End) when End == end_stream; End == end_json ->
+ case resume([end_json], State, Handler, Stack, Config) of
+ {incomplete, _} -> ?error(State, [], Handler, Stack, Config);
+ Else -> Else
+ end;
+ (Tokens) ->
+ resume(Tokens, State, Handler, Stack, Config)
+ end
+ };
+incomplete(State, Handler, Stack, Config=#config{incomplete_handler=F}) ->
+ F([], {parser, State, Handler, Stack}, jsx_config:config_to_list(Config)).
+
+
+handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}.
+
+
+value([String|Tokens], Handler, Stack, Config) when is_binary(String) ->
+ try clean_string(String, Config) of Clean ->
+ maybe_done(Tokens, handle_event({string, Clean}, Handler, Config), Stack, Config)
+ catch error:badarg ->
+ ?error(value, [{string, String}|Tokens], Handler, Stack, Config)
+ end;
+value([true|Tokens], Handler, Stack, Config) ->
+ maybe_done(Tokens, handle_event({literal, true}, Handler, Config), Stack, Config);
+value([false|Tokens], Handler, Stack, Config) ->
+ maybe_done(Tokens, handle_event({literal, false}, Handler, Config), Stack, Config);
+value([null|Tokens], Handler, Stack, Config) ->
+ maybe_done(Tokens, handle_event({literal, null}, Handler, Config), Stack, Config);
+value([start_object|Tokens], Handler, Stack, Config) ->
+ object(Tokens, handle_event(start_object, Handler, Config), [object|Stack], Config);
+value([start_array|Tokens], Handler, Stack, Config) ->
+ array(Tokens, handle_event(start_array, Handler, Config), [array|Stack], Config);
+value([Number|Tokens], Handler, Stack, Config) when is_integer(Number) ->
+ maybe_done(Tokens, handle_event({integer, Number}, Handler, Config), Stack, Config);
+value([Number|Tokens], Handler, Stack, Config) when is_float(Number) ->
+ maybe_done(Tokens, handle_event({float, Number}, Handler, Config), Stack, Config);
+value([{raw, Raw}|Tokens], Handler, Stack, Config) when is_binary(Raw) ->
+ value((jsx:decoder(?MODULE, [], []))(Raw) ++ Tokens, Handler, Stack, Config);
+value([{{Year, Month, Day}, {Hour, Min, Sec}}|Tokens], Handler, Stack, Config)
+when is_integer(Year), is_integer(Month), is_integer(Day), is_integer(Hour), is_integer(Min), is_integer(Sec) ->
+ value([{string, unicode:characters_to_binary(io_lib:format(
+ "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ",
+ [Year, Month, Day, Hour, Min, Sec]
+ ))}|Tokens],
+ Handler,
+ Stack,
+ Config
+ );
+value([{{Year, Month, Day}, {Hour, Min, Sec}}|Tokens], Handler, Stack, Config)
+when is_integer(Year), is_integer(Month), is_integer(Day), is_integer(Hour), is_integer(Min), is_float(Sec) ->
+ value([{string, unicode:characters_to_binary(io_lib:format(
+ "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~9.6.0fZ",
+ [Year, Month, Day, Hour, Min, Sec]
+ ))}|Tokens],
+ Handler,
+ Stack,
+ Config
+ );
+value([{_, Value}|Tokens], Handler, Stack, Config) ->
+ value([Value] ++ Tokens, Handler, Stack, Config);
+value([String|Tokens], Handler, Stack, Config) when is_atom(String) ->
+ value([{string, atom_to_binary(String, utf8)}] ++ Tokens, Handler, Stack, Config);
+value([], Handler, Stack, Config) ->
+ incomplete(value, Handler, Stack, Config);
+value(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
+ ?error(value, BadTokens, Handler, Stack, Config);
+value(Token, Handler, Stack, Config) ->
+ value([Token], Handler, Stack, Config).
+
+
+object([end_object|Tokens], Handler, [object|Stack], Config) ->
+ maybe_done(Tokens, handle_event(end_object, Handler, Config), Stack, Config);
+object([{key, Key}|Tokens], Handler, Stack, Config)
+when is_atom(Key); is_binary(Key); is_integer(Key) ->
+ object([Key|Tokens], Handler, Stack, Config);
+object([Key|Tokens], Handler, [object|Stack], Config)
+when is_atom(Key); is_binary(Key); is_integer(Key) ->
+ try clean_string(fix_key(Key), Config)
+ of K ->
+ value(
+ Tokens,
+ handle_event({key, K}, Handler, Config),
+ [object|Stack],
+ Config
+ )
+ catch error:badarg ->
+ ?error(object, [{string, Key}|Tokens], Handler, Stack, Config)
+ end;
+object([], Handler, Stack, Config) ->
+ incomplete(object, Handler, Stack, Config);
+object(Token, Handler, Stack, Config) ->
+ object([Token], Handler, Stack, Config).
+
+
+array([end_array|Tokens], Handler, [array|Stack], Config) ->
+ maybe_done(Tokens, handle_event(end_array, Handler, Config), Stack, Config);
+array([], Handler, Stack, Config) ->
+ incomplete(array, Handler, Stack, Config);
+array(Tokens, Handler, Stack, Config) when is_list(Tokens) ->
+ value(Tokens, Handler, Stack, Config);
+array(Token, Handler, Stack, Config) ->
+ array([Token], Handler, Stack, Config).
+
+
+maybe_done([end_json], Handler, [], Config) ->
+ done([end_json], Handler, [], Config);
+maybe_done(Tokens, Handler, [object|_] = Stack, Config) when is_list(Tokens) ->
+ object(Tokens, Handler, Stack, Config);
+maybe_done(Tokens, Handler, [array|_] = Stack, Config) when is_list(Tokens) ->
+ array(Tokens, Handler, Stack, Config);
+maybe_done([], Handler, Stack, Config) ->
+ incomplete(maybe_done, Handler, Stack, Config);
+maybe_done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
+ ?error(maybe_done, BadTokens, Handler, Stack, Config);
+maybe_done(Token, Handler, Stack, Config) ->
+ maybe_done([Token], Handler, Stack, Config).
+
+
+done([], Handler, [], Config=#config{stream=true}) ->
+ incomplete(done, Handler, [], Config);
+done(Tokens, Handler, [], Config) when Tokens == [end_json]; Tokens == [] ->
+ {_, State} = handle_event(end_json, Handler, Config),
+ State;
+done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
+ ?error(done, BadTokens, Handler, Stack, Config);
+done(Token, Handler, Stack, Config) ->
+ done([Token], Handler, Stack, Config).
+
+
+fix_key(Key) when is_atom(Key) -> atom_to_binary(Key, utf8);
+fix_key(Key) when is_integer(Key) -> list_to_binary(integer_to_list(Key));
+fix_key(Key) when is_binary(Key) -> Key.
+
+
+clean_string(Bin, #config{dirty_strings=true}) -> Bin;
+clean_string(Bin, Config) -> clean(Bin, [], Config).
+
+
+%% unroll the control characters
+clean(<<0, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(0, Config)], Config);
+clean(<<1, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(1, Config)], Config);
+clean(<<2, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(2, Config)], Config);
+clean(<<3, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(3, Config)], Config);
+clean(<<4, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(4, Config)], Config);
+clean(<<5, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(5, Config)], Config);
+clean(<<6, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(6, Config)], Config);
+clean(<<7, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(7, Config)], Config);
+clean(<<8, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(8, Config)], Config);
+clean(<<9, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(9, Config)], Config);
+clean(<<10, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(10, Config)], Config);
+clean(<<11, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(11, Config)], Config);
+clean(<<12, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(12, Config)], Config);
+clean(<<13, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(13, Config)], Config);
+clean(<<14, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(14, Config)], Config);
+clean(<<15, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(15, Config)], Config);
+clean(<<16, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(16, Config)], Config);
+clean(<<17, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(17, Config)], Config);
+clean(<<18, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(18, Config)], Config);
+clean(<<19, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(19, Config)], Config);
+clean(<<20, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(20, Config)], Config);
+clean(<<21, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(21, Config)], Config);
+clean(<<22, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(22, Config)], Config);
+clean(<<23, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(23, Config)], Config);
+clean(<<24, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(24, Config)], Config);
+clean(<<25, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(25, Config)], Config);
+clean(<<26, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(26, Config)], Config);
+clean(<<27, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(27, Config)], Config);
+clean(<<28, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(28, Config)], Config);
+clean(<<29, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(29, Config)], Config);
+clean(<<30, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(30, Config)], Config);
+clean(<<31, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(31, Config)], Config);
+clean(<<34, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(34, Config)], Config);
+clean(<<47, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(47, Config)], Config);
+clean(<<92, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(92, Config)], Config);
+clean(<<X/utf8, Rest/binary>> = Bin, Acc, Config=#config{uescape=true}) ->
+ case X of
+ X when X < 16#80 -> start_count(Bin, Acc, Config);
+ _ -> clean(Rest, [Acc, json_escape_sequence(X)], Config)
+ end;
+%% u+2028
+clean(<<226, 128, 168, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(16#2028, Config)], Config);
+%% u+2029
+clean(<<226, 128, 169, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(16#2029, Config)], Config);
+clean(<<_/utf8, _/binary>> = Bin, Acc, Config) -> start_count(Bin, Acc, Config);
+%% surrogates
+clean(<<237, X, _, Rest/binary>>, Acc, Config) when X >= 160 ->
+ clean(Rest, [Acc, maybe_replace(surrogate, Config)], Config);
+%% overlong encodings and missing continuations of a 2 byte sequence
+clean(<<X, Rest/binary>>, Acc, Config) when X >= 192, X =< 223 ->
+ clean(strip_continuations(Rest, 1), [Acc, maybe_replace(badutf, Config)], Config);
+%% overlong encodings and missing continuations of a 3 byte sequence
+clean(<<X, Rest/binary>>, Acc, Config) when X >= 224, X =< 239 ->
+ clean(strip_continuations(Rest, 2), [Acc, maybe_replace(badutf, Config)], Config);
+%% overlong encodings and missing continuations of a 4 byte sequence
+clean(<<X, Rest/binary>>, Acc, Config) when X >= 240, X =< 247 ->
+ clean(strip_continuations(Rest, 3), [Acc, maybe_replace(badutf, Config)], Config);
+clean(<<_, Rest/binary>>, Acc, Config) ->
+ clean(Rest, [Acc, maybe_replace(badutf, Config)], Config);
+clean(<<>>, Acc, _) -> iolist_to_binary(Acc).
+
+
+start_count(Bin, Acc, Config) ->
+ Size = count(Bin, 0, Config),
+ <<Clean:Size/binary, Rest/binary>> = Bin,
+ clean(Rest, [Acc, Clean], Config).
+
+
+%% again, unrolling ascii makes a huge difference, sadly
+count(<<0, _/binary>>, N, _) -> N;
+count(<<1, _/binary>>, N, _) -> N;
+count(<<2, _/binary>>, N, _) -> N;
+count(<<3, _/binary>>, N, _) -> N;
+count(<<4, _/binary>>, N, _) -> N;
+count(<<5, _/binary>>, N, _) -> N;
+count(<<6, _/binary>>, N, _) -> N;
+count(<<7, _/binary>>, N, _) -> N;
+count(<<8, _/binary>>, N, _) -> N;
+count(<<9, _/binary>>, N, _) -> N;
+count(<<10, _/binary>>, N, _) -> N;
+count(<<11, _/binary>>, N, _) -> N;
+count(<<12, _/binary>>, N, _) -> N;
+count(<<13, _/binary>>, N, _) -> N;
+count(<<14, _/binary>>, N, _) -> N;
+count(<<15, _/binary>>, N, _) -> N;
+count(<<16, _/binary>>, N, _) -> N;
+count(<<17, _/binary>>, N, _) -> N;
+count(<<18, _/binary>>, N, _) -> N;
+count(<<19, _/binary>>, N, _) -> N;
+count(<<20, _/binary>>, N, _) -> N;
+count(<<21, _/binary>>, N, _) -> N;
+count(<<22, _/binary>>, N, _) -> N;
+count(<<23, _/binary>>, N, _) -> N;
+count(<<24, _/binary>>, N, _) -> N;
+count(<<25, _/binary>>, N, _) -> N;
+count(<<26, _/binary>>, N, _) -> N;
+count(<<27, _/binary>>, N, _) -> N;
+count(<<28, _/binary>>, N, _) -> N;
+count(<<29, _/binary>>, N, _) -> N;
+count(<<30, _/binary>>, N, _) -> N;
+count(<<31, _/binary>>, N, _) -> N;
+count(<<32, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<33, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<34, _/binary>>, N, _) -> N;
+count(<<35, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<36, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<37, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<38, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<39, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<40, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<41, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<42, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<43, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<44, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<45, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<46, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<47, _/binary>>, N, _) -> N;
+count(<<48, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<49, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<50, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<51, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<52, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<53, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<54, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<55, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<56, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<57, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<58, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<59, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<60, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<61, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<62, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<63, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<64, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<65, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<66, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<67, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<68, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<69, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<70, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<71, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<72, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<73, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<74, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<75, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<76, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<77, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<78, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<79, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<80, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<81, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<82, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<83, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<84, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<85, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<86, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<87, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<88, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<89, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<90, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<91, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<92, _/binary>>, N, _) -> N;
+count(<<93, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<94, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<95, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<96, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<97, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<98, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<99, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<100, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<101, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<102, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<103, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<104, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<105, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<106, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<107, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<108, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<109, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<110, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<111, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<112, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<113, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<114, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<115, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<116, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<117, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<118, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<119, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<120, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<121, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<122, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<123, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<124, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<125, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<126, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<127, Rest/binary>>, N, Config) ->
+ count(Rest, N + 1, Config);
+count(<<_/utf8, _/binary>>, N, #config{uescape=true}) -> N;
+count(<<X/utf8, Rest/binary>>, N, Config) ->
+ case X of
+ X when X < 16#800 -> count(Rest, N + 2, Config);
+ 16#2028 -> N;
+ 16#2029 -> N;
+ X when X < 16#10000 -> count(Rest, N + 3, Config);
+ _ -> count(Rest, N + 4, Config)
+ end;
+count(<<_, _/binary>>, N, _) -> N;
+count(<<>>, N, _) -> N.
+
+
+strip_continuations(Bin, 0) -> Bin;
+strip_continuations(<<X, Rest/binary>>, N) when X >= 128, X =< 191 ->
+ strip_continuations(Rest, N - 1);
+%% not a continuation byte
+strip_continuations(Bin, _) -> Bin.
+
+
+maybe_replace($\b, #config{escaped_strings=true}) -> <<$\\, $b>>;
+maybe_replace($\t, #config{escaped_strings=true}) -> <<$\\, $t>>;
+maybe_replace($\n, #config{escaped_strings=true}) -> <<$\\, $n>>;
+maybe_replace($\f, #config{escaped_strings=true}) -> <<$\\, $f>>;
+maybe_replace($\r, #config{escaped_strings=true}) -> <<$\\, $r>>;
+maybe_replace($\", #config{escaped_strings=true}) -> <<$\\, $\">>;
+maybe_replace($/, Config=#config{escaped_strings=true}) ->
+ case Config#config.escaped_forward_slashes of
+ true -> <<$\\, $/>>;
+ false -> <<$/>>
+ end;
+maybe_replace($\\, #config{escaped_strings=true}) -> <<$\\, $\\>>;
+maybe_replace(X, #config{escaped_strings=true}) when X < 32 ->
+ json_escape_sequence(X);
+maybe_replace(X, Config=#config{escaped_strings=true}) when X == 16#2028; X == 16#2029 ->
+ case Config#config.unescaped_jsonp of
+ true -> <<X/utf8>>;
+ false -> json_escape_sequence(X)
+ end;
+maybe_replace(Atom, #config{strict_utf8=true}) when is_atom(Atom) ->
+ erlang:error(badarg);
+maybe_replace(surrogate, _Config) ->
+ <<16#fffd/utf8>>;
+maybe_replace(badutf, _Config) ->
+ <<16#fffd/utf8>>;
+maybe_replace(X, _Config) ->
+ <<X/utf8>>.
+
+
+%% convert a codepoint to its \uXXXX equivalent
+json_escape_sequence(X) when X < 65536 ->
+ <<A:4, B:4, C:4, D:4>> = <<X:16>>,
+ <<$\\, $u, (to_hex(A)), (to_hex(B)), (to_hex(C)), (to_hex(D))>>;
+json_escape_sequence(X) ->
+ Adjusted = X - 16#10000,
+ <<A:10, B:10>> = <<Adjusted:20>>,
+ [json_escape_sequence(A + 16#d800), json_escape_sequence(B + 16#dc00)].
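+%% e.g. (illustrative, derived from the clauses above): json_escape_sequence(16#2028)
+%% yields <<"\\u2028">>, while a codepoint above 16#ffff such as 16#1d11e yields the
+%% surrogate pair iodata [<<"\\ud834">>, <<"\\udd1e">>]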
+
+
+to_hex(10) -> $a;
+to_hex(11) -> $b;
+to_hex(12) -> $c;
+to_hex(13) -> $d;
+to_hex(14) -> $e;
+to_hex(15) -> $f;
+to_hex(X) -> X + 48. %% ascii "0" is 48, "1" is 49, etc.
+
+
+%% for raw input
+-spec init(proplists:proplist()) -> list().
+
+init([]) -> [].
+
+
+-spec handle_event(Event::any(), Acc::list()) -> list().
+
+handle_event(end_json, State) -> lists:reverse(State);
+handle_event(Event, State) -> [Event] ++ State.
+
+
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+parse(Events, Config) -> value(Events, {jsx, []}, [], jsx_config:parse_config(Config)).
+
+
+error_test_() ->
+ [
+ {"value error", ?_assertError(badarg, parse([self()], []))},
+ {"maybe_done error", ?_assertError(badarg, parse([start_array, end_array, start_array, end_json], []))},
+ {"done error", ?_assertError(badarg, parse([{string, <<"">>}, {literal, true}, end_json], []))},
+ {"string error", ?_assertError(badarg, parse([{string, <<237, 160, 128>>}, end_json], [strict]))}
+ ].
+
+
+custom_error_handler_test_() ->
+ Error = fun(Rest, {_, State, _, _}, _) -> {State, Rest} end,
+ [
+ {"value error", ?_assertEqual(
+ {value, [self()]},
+ parse([self()], [{error_handler, Error}])
+ )},
+ {"maybe_done error", ?_assertEqual(
+ {maybe_done, [start_array, end_json]},
+ parse([start_array, end_array, start_array, end_json], [{error_handler, Error}])
+ )},
+ {"done error", ?_assertEqual(
+ {maybe_done, [{literal, true}, end_json]},
+ parse([{string, <<"">>}, {literal, true}, end_json], [{error_handler, Error}])
+ )},
+ {"string error", ?_assertEqual(
+ {value, [{string, <<237, 160, 128>>}, end_json]},
+ parse([{string, <<237, 160, 128>>}, end_json], [{error_handler, Error}, strict])
+ )}
+ ].
+
+
+incomplete_test_() ->
+ Cases = [
+ {"incomplete value", []},
+ {"incomplete object", [start_object]},
+ {"incomplete array", [start_array]},
+ {"incomplete maybe_done", [start_array, end_array]}
+ ],
+ [{Title, ?_assertError(badarg, parse(Events, []))}
+ || {Title, Events} <- Cases
+ ].
+
+
+custom_incomplete_handler_test_() ->
+ [
+ {"custom incomplete handler", ?_assertError(
+ badarg,
+ parse([], [{incomplete_handler, fun(_, _, _) -> erlang:error(badarg) end}])
+ )}
+ ].
+
+
+raw_test_() ->
+ Parse = fun(Events, Config) -> (parser(?MODULE, [], Config))(Events ++ [end_json]) end,
+ [
+ {"raw empty list", ?_assertEqual(
+ [start_array, end_array],
+ Parse([{raw, <<"[]">>}], [])
+ )},
+ {"raw empty object", ?_assertEqual(
+ [start_object, end_object],
+ Parse([{raw, <<"{}">>}], [])
+ )},
+ {"raw chunk inside stream", ?_assertEqual(
+ [start_object, {key, <<"key">>}, start_array, {literal, true}, end_array, end_object],
+ Parse([start_object, {key, <<"key">>}, {raw, <<"[true]">>}, end_object], [])
+ )}
+ ].
+
+
+%% erlang refuses to encode certain codepoints, so fake them
+to_fake_utf8(N) when N < 16#0080 -> <<N:8>>;
+to_fake_utf8(N) when N < 16#0800 ->
+ <<0:5, Y:5, X:6>> = <<N:16>>,
+ <<2#110:3, Y:5, 2#10:2, X:6>>;
+to_fake_utf8(N) when N < 16#10000 ->
+ <<Z:4, Y:6, X:6>> = <<N:16>>,
+ <<2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6>>;
+to_fake_utf8(N) ->
+ <<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>,
+ <<2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6>>.
+
+
+codepoints() ->
+ unicode:characters_to_binary(
+ [32, 33]
+ ++ lists:seq(35, 46)
+ ++ lists:seq(48, 91)
+ ++ lists:seq(93, 16#2027)
+ ++ lists:seq(16#202a, 16#d7ff)
+ ++ lists:seq(16#e000, 16#ffff)
+ ).
+
+
+extended_codepoints() ->
+ unicode:characters_to_binary(
+ lists:seq(16#10000, 16#1ffff) ++ [
+ 16#20000, 16#30000, 16#40000, 16#50000, 16#60000,
+ 16#70000, 16#80000, 16#90000, 16#a0000, 16#b0000,
+ 16#c0000, 16#d0000, 16#e0000, 16#f0000, 16#100000
+ ]
+ ).
+
+
+surrogates() -> [ to_fake_utf8(N) || N <- lists:seq(16#d800, 16#dfff) ].
+
+
+clean_string_helper(String) ->
+ try clean_string(String, #config{strict_utf8=true}) of Clean -> Clean
+ catch error:badarg -> {error, badarg}
+ end.
+
+
+clean_string_test_() ->
+ [
+ {"clean codepoints", ?_assertEqual(
+ codepoints(),
+ clean_string(codepoints(), #config{})
+ )},
+ {"clean extended codepoints", ?_assertEqual(
+ extended_codepoints(),
+ clean_string(extended_codepoints(), #config{})
+ )},
+ {"escape path codepoints", ?_assertEqual(
+ codepoints(),
+ clean_string(codepoints(), #config{escaped_strings=true})
+ )},
+ {"escape path extended codepoints", ?_assertEqual(
+ extended_codepoints(),
+ clean_string(extended_codepoints(), #config{escaped_strings=true})
+ )},
+ {"error surrogates", ?_assertEqual(
+ lists:duplicate(length(surrogates()), {error, badarg}),
+ lists:map(fun(Codepoint) -> clean_string_helper(Codepoint) end, surrogates())
+ )},
+ {"clean surrogates", ?_assertEqual(
+ lists:duplicate(length(surrogates()), <<16#fffd/utf8>>),
+ lists:map(fun(Codepoint) -> clean_string(Codepoint, #config{}) end, surrogates())
+ )}
+ ].
+
+
+escape_test_() ->
+ [
+ {"maybe_escape backspace", ?_assertEqual(
+ <<"\\b">>,
+ clean_string(<<16#0008/utf8>>, #config{escaped_strings=true})
+ )},
+ {"don't escape backspace", ?_assertEqual(
+ <<"\b">>,
+ clean_string(<<16#0008/utf8>>, #config{})
+ )},
+ {"maybe_escape tab", ?_assertEqual(
+ <<"\\t">>,
+ clean_string(<<16#0009/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape newline", ?_assertEqual(
+ <<"\\n">>,
+ clean_string(<<16#000a/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape formfeed", ?_assertEqual(
+ <<"\\f">>,
+ clean_string(<<16#000c/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape carriage return", ?_assertEqual(
+ <<"\\r">>,
+ clean_string(<<16#000d/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape quote", ?_assertEqual(
+ <<"\\\"">>,
+ clean_string(<<16#0022/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape forward slash", ?_assertEqual(
+ <<"\\/">>,
+ clean_string(<<16#002f/utf8>>, #config{escaped_strings=true, escaped_forward_slashes=true})
+ )},
+ {"do not maybe_escape forward slash", ?_assertEqual(
+ <<"/">>,
+ clean_string(<<16#002f/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape backslash", ?_assertEqual(
+ <<"\\\\">>,
+ clean_string(<<16#005c/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape jsonp (u2028)", ?_assertEqual(
+ <<"\\u2028">>,
+ clean_string(<<16#2028/utf8>>, #config{escaped_strings=true})
+ )},
+ {"do not maybe_escape jsonp (u2028)", ?_assertEqual(
+ <<16#2028/utf8>>,
+ clean_string(<<16#2028/utf8>>, #config{escaped_strings=true, unescaped_jsonp=true})
+ )},
+ {"maybe_escape jsonp (u2029)", ?_assertEqual(
+ <<"\\u2029">>,
+ clean_string(<<16#2029/utf8>>, #config{escaped_strings=true})
+ )},
+ {"do not maybe_escape jsonp (u2029)", ?_assertEqual(
+ <<16#2029/utf8>>,
+ clean_string(<<16#2029/utf8>>, #config{escaped_strings=true, unescaped_jsonp=true})
+ )},
+ {"maybe_escape u0000", ?_assertEqual(
+ <<"\\u0000">>,
+ clean_string(<<16#0000/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0001", ?_assertEqual(
+ <<"\\u0001">>,
+ clean_string(<<16#0001/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0002", ?_assertEqual(
+ <<"\\u0002">>,
+ clean_string(<<16#0002/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0003", ?_assertEqual(
+ <<"\\u0003">>,
+ clean_string(<<16#0003/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0004", ?_assertEqual(
+ <<"\\u0004">>,
+ clean_string(<<16#0004/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0005", ?_assertEqual(
+ <<"\\u0005">>,
+ clean_string(<<16#0005/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0006", ?_assertEqual(
+ <<"\\u0006">>,
+ clean_string(<<16#0006/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0007", ?_assertEqual(
+ <<"\\u0007">>,
+ clean_string(<<16#0007/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u000b", ?_assertEqual(
+ <<"\\u000b">>,
+ clean_string(<<16#000b/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u000e", ?_assertEqual(
+ <<"\\u000e">>,
+ clean_string(<<16#000e/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u000f", ?_assertEqual(
+ <<"\\u000f">>,
+ clean_string(<<16#000f/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0010", ?_assertEqual(
+ <<"\\u0010">>,
+ clean_string(<<16#0010/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0011", ?_assertEqual(
+ <<"\\u0011">>,
+ clean_string(<<16#0011/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0012", ?_assertEqual(
+ <<"\\u0012">>,
+ clean_string(<<16#0012/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0013", ?_assertEqual(
+ <<"\\u0013">>,
+ clean_string(<<16#0013/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0014", ?_assertEqual(
+ <<"\\u0014">>,
+ clean_string(<<16#0014/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0015", ?_assertEqual(
+ <<"\\u0015">>,
+ clean_string(<<16#0015/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0016", ?_assertEqual(
+ <<"\\u0016">>,
+ clean_string(<<16#0016/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0017", ?_assertEqual(
+ <<"\\u0017">>,
+ clean_string(<<16#0017/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0018", ?_assertEqual(
+ <<"\\u0018">>,
+ clean_string(<<16#0018/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u0019", ?_assertEqual(
+ <<"\\u0019">>,
+ clean_string(<<16#0019/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001a", ?_assertEqual(
+ <<"\\u001a">>,
+ clean_string(<<16#001a/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001b", ?_assertEqual(
+ <<"\\u001b">>,
+ clean_string(<<16#001b/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001c", ?_assertEqual(
+ <<"\\u001c">>,
+ clean_string(<<16#001c/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001d", ?_assertEqual(
+ <<"\\u001d">>,
+ clean_string(<<16#001d/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001e", ?_assertEqual(
+ <<"\\u001e">>,
+ clean_string(<<16#001e/utf8>>, #config{escaped_strings=true})
+ )},
+ {"maybe_escape u001f", ?_assertEqual(
+ <<"\\u001f">>,
+ clean_string(<<16#001f/utf8>>, #config{escaped_strings=true})
+ )}
+ ].
+
+
+bad_utf8_test_() ->
+ [
+ {"orphan continuation byte u+0080", ?_assertError(
+ badarg,
+ clean_string(<<16#0080>>, #config{strict_utf8=true})
+ )},
+ {"orphan continuation byte u+0080 replaced", ?_assertEqual(
+ <<16#fffd/utf8>>,
+ clean_string(<<16#0080>>, #config{})
+ )},
+ {"orphan continuation byte u+00bf", ?_assertError(
+ badarg,
+ clean_string(<<16#00bf>>, #config{strict_utf8=true})
+ )},
+ {"orphan continuation byte u+00bf replaced", ?_assertEqual(
+ <<16#fffd/utf8>>,
+ clean_string(<<16#00bf>>, #config{})
+ )},
+ {"2 continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(binary:copy(<<16#0080>>, 2))/binary>>, #config{strict_utf8=true})
+ )},
+ {"2 continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, 2),
+ clean_string(<<(binary:copy(<<16#0080>>, 2))/binary>>, #config{})
+ )},
+ {"3 continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(binary:copy(<<16#0080>>, 3))/binary>>, #config{strict_utf8=true})
+ )},
+ {"3 continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, 3),
+ clean_string(<<(binary:copy(<<16#0080>>, 3))/binary>>, #config{})
+ )},
+ {"4 continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(binary:copy(<<16#0080>>, 4))/binary>>, #config{strict_utf8=true})
+ )},
+ {"4 continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, 4),
+ clean_string(<<(binary:copy(<<16#0080>>, 4))/binary>>, #config{})
+ )},
+ {"5 continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(binary:copy(<<16#0080>>, 5))/binary>>, #config{strict_utf8=true})
+ )},
+ {"5 continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, 5),
+ clean_string(<<(binary:copy(<<16#0080>>, 5))/binary>>, #config{})
+ )},
+ {"6 continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(binary:copy(<<16#0080>>, 6))/binary>>, #config{strict_utf8=true})
+ )},
+ {"6 continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, 6),
+ clean_string(<<(binary:copy(<<16#0080>>, 6))/binary>>, #config{})
+ )},
+ {"all continuation bytes", ?_assertError(
+ badarg,
+ clean_string(<<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>>, #config{strict_utf8=true})
+ )},
+ {"all continuation bytes replaced", ?_assertEqual(
+ binary:copy(<<16#fffd/utf8>>, length(lists:seq(16#0080, 16#00bf))),
+ clean_string(
+ <<(list_to_binary(lists:seq(16#0080, 16#00bf)))/binary>>,
+ #config{}
+ )
+ )},
+ {"lonely start byte", ?_assertError(
+ badarg,
+ clean_string(<<16#00c0>>, #config{strict_utf8=true})
+ )},
+ {"lonely start byte replaced", ?_assertEqual(
+ <<16#fffd/utf8>>,
+ clean_string(<<16#00c0>>, #config{})
+ )},
+ {"lonely start bytes (2 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#00c0, 32, 16#00df>>, #config{strict_utf8=true})
+ )},
+ {"lonely start bytes (2 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ clean_string(<<16#00c0, 32, 16#00df>>, #config{})
+ )},
+ {"lonely start bytes (3 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#00e0, 32, 16#00ef>>, #config{strict_utf8=true})
+ )},
+ {"lonely start bytes (3 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ clean_string(<<16#00e0, 32, 16#00ef>>, #config{})
+ )},
+ {"lonely start bytes (4 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#00f0, 32, 16#00f7>>, #config{strict_utf8=true})
+ )},
+ {"lonely start bytes (4 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32, 16#fffd/utf8>>,
+ clean_string(<<16#00f0, 32, 16#00f7>>, #config{})
+ )},
+ {"missing continuation byte (3 byte)", ?_assertError(
+ badarg,
+ clean_string(<<224, 160, 32>>, #config{strict_utf8=true})
+ )},
+ {"missing continuation byte (3 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<224, 160, 32>>, #config{})
+ )},
+ {"missing continuation byte (4 byte missing one)", ?_assertError(
+ badarg,
+ clean_string(<<240, 144, 128, 32>>, #config{strict_utf8=true})
+ )},
+ {"missing continuation byte (4 byte missing one) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<240, 144, 128, 32>>, #config{})
+ )},
+ {"missing continuation byte (4 byte missing two)", ?_assertError(
+ badarg,
+ clean_string(<<240, 144, 32>>, #config{strict_utf8=true})
+ )},
+ {"missing continuation byte (4 byte missing two) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<240, 144, 32>>, #config{})
+ )},
+ {"overlong encoding of u+002f (2 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#c0, 16#af, 32>>, #config{strict_utf8=true})
+ )},
+ {"overlong encoding of u+002f (2 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#c0, 16#af, 32>>, #config{})
+ )},
+ {"overlong encoding of u+002f (3 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#e0, 16#80, 16#af, 32>>, #config{strict_utf8=true})
+ )},
+ {"overlong encoding of u+002f (3 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#e0, 16#80, 16#af, 32>>, #config{})
+ )},
+ {"overlong encoding of u+002f (4 byte)", ?_assertError(
+ badarg,
+ clean_string(<<16#f0, 16#80, 16#80, 16#af, 32>>, #config{strict_utf8=true})
+ )},
+ {"overlong encoding of u+002f (4 byte) replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#f0, 16#80, 16#80, 16#af, 32>>, #config{})
+ )},
+ {"highest overlong 2 byte sequence", ?_assertError(
+ badarg,
+ clean_string(<<16#c1, 16#bf, 32>>, #config{strict_utf8=true})
+ )},
+ {"highest overlong 2 byte sequence replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#c1, 16#bf, 32>>, #config{})
+ )},
+ {"highest overlong 3 byte sequence", ?_assertError(
+ badarg,
+ clean_string(<<16#e0, 16#9f, 16#bf, 32>>, #config{strict_utf8=true})
+ )},
+ {"highest overlong 3 byte sequence replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#e0, 16#9f, 16#bf, 32>>, #config{})
+ )},
+ {"highest overlong 4 byte sequence", ?_assertError(
+ badarg,
+ clean_string(<<16#f0, 16#8f, 16#bf, 16#bf, 32>>, #config{strict_utf8=true})
+ )},
+ {"highest overlong 4 byte sequence replaced", ?_assertEqual(
+ <<16#fffd/utf8, 32>>,
+ clean_string(<<16#f0, 16#8f, 16#bf, 16#bf, 32>>, #config{})
+ )}
+ ].
+
+
+json_escape_sequence_test_() ->
+ [
+ {"json escape sequence test - 16#0000", ?_assertEqual(<<"\\u0000"/utf8>>, json_escape_sequence(16#0000))},
+ {"json escape sequence test - 16#abc", ?_assertEqual(<<"\\u0abc"/utf8>>, json_escape_sequence(16#abc))},
+ {"json escape sequence test - 16#def", ?_assertEqual(<<"\\u0def"/utf8>>, json_escape_sequence(16#def))}
+ ].
+
+
+uescape_test_() ->
+ [
+ {"\"\\u0080\"", ?_assertEqual(
+ <<"\\u0080">>,
+ clean_string(<<128/utf8>>, #config{uescape=true})
+ )},
+ {"\"\\u8ca8\\u5481\\u3002\\u0091\\u0091\"", ?_assertEqual(
+ <<"\\u8ca8\\u5481\\u3002\\u0091\\u0091">>,
+ clean_string(
+ <<232,178,168,229,146,129,227,128,130,194,145,194,145>>,
+ #config{uescape=true}
+ )
+ )},
+ {"\"\\ud834\\udd1e\"", ?_assertEqual(
+ <<"\\ud834\\udd1e">>,
+ clean_string(<<240, 157, 132, 158>>, #config{uescape=true})
+ )},
+ {"\"\\ud83d\\ude0a\"", ?_assertEqual(
+ <<"\\ud83d\\ude0a">>,
+ clean_string(<<240, 159, 152, 138>>, #config{uescape=true})
+ )}
+ ].
+
+
+fix_key_test_() ->
+ [
+ {"binary key", ?_assertEqual(fix_key(<<"foo">>), <<"foo">>)},
+ {"atom key", ?_assertEqual(fix_key(foo), <<"foo">>)},
+ {"integer key", ?_assertEqual(fix_key(123), <<"123">>)}
+ ].
+
+
+datetime_test_() ->
+ [
+ {"datetime", ?_assertEqual(
+ [start_array, {string, <<"2014-08-13T23:12:34Z">>}, end_array, end_json],
+ parse([start_array, {{2014,08,13},{23,12,34}}, end_array, end_json], [])
+ )},
+ {"datetime", ?_assertEqual(
+ [start_array, {string, <<"2014-08-13T23:12:34.363369Z">>}, end_array, end_json],
+ parse([start_array, {{2014,08,13},{23,12,34.363369}}, end_array, end_json], [])
+ )}
+ ].
+
+
+-endif.
diff --git a/deps/jsx/src/jsx_to_json.erl b/deps/jsx/src/jsx_to_json.erl
new file mode 100644
index 0000000..c2d5832
--- /dev/null
+++ b/deps/jsx/src/jsx_to_json.erl
@@ -0,0 +1,399 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_to_json).
+
+-export([to_json/2, format/2]).
+-export([init/1, handle_event/2]).
+-export([start_json/0, start_json/1]).
+-export([start_object/1, start_array/1, finish/1, insert/2, get_key/1, get_value/1]).
+
+
+-record(config, {
+ space = 0,
+ indent = 0,
+ depth = 0
+}).
+
+-type config() :: list().
+-export_type([config/0]).
+
+
+-spec to_json(Source::any(), Config::config()) -> binary().
+
+to_json(Source, Config) when is_list(Config) ->
+ (jsx:encoder(?MODULE, Config, jsx_config:extract_config(Config ++ [escaped_strings])))(Source).
+
+
+-spec format(Source::binary(), Config::config()) -> binary().
+
+format(Source, Config) when is_binary(Source) andalso is_list(Config) ->
+ (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config ++ [escaped_strings])))(Source);
+format(_, _) -> erlang:error(badarg).
+
+
+parse_config(Config) -> parse_config(Config, #config{}).
+
+parse_config([{space, Val}|Rest], Config) when is_integer(Val), Val > 0 ->
+ parse_config(Rest, Config#config{space = Val});
+parse_config([space|Rest], Config) ->
+ parse_config(Rest, Config#config{space = 1});
+parse_config([{indent, Val}|Rest], Config) when is_integer(Val), Val > 0 ->
+ parse_config(Rest, Config#config{indent = Val});
+parse_config([indent|Rest], Config) ->
+ parse_config(Rest, Config#config{indent = 1});
+parse_config([{K, _}|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config)
+ ; false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([K|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config)
+ ; false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([], Config) ->
+ Config.
+
+
+-define(start_object, <<"{">>).
+-define(start_array, <<"[">>).
+-define(end_object, <<"}">>).
+-define(end_array, <<"]">>).
+-define(colon, <<":">>).
+-define(comma, <<",">>).
+-define(quote, <<"\"">>).
+-define(space, <<" ">>).
+-define(newline, <<"\n">>).
+
+
+-type state() :: {unicode:charlist(), #config{}}.
+-spec init(Config::proplists:proplist()) -> state().
+
+init(Config) -> {[], parse_config(Config)}.
+
+
+-spec handle_event(Event::any(), State::state()) -> state().
+
+handle_event(end_json, State) -> get_value(State);
+
+handle_event(start_object, State) -> start_object(State);
+handle_event(end_object, State) -> finish(State);
+
+handle_event(start_array, State) -> start_array(State);
+handle_event(end_array, State) -> finish(State);
+
+handle_event({Type, Event}, {_, Config} = State) -> insert(encode(Type, Event, Config), State).
+
+
+encode(string, String, _Config) ->
+ [?quote, String, ?quote];
+encode(key, Key, _Config) ->
+ [?quote, Key, ?quote];
+encode(literal, Literal, _Config) ->
+ erlang:atom_to_list(Literal);
+encode(integer, Integer, _Config) ->
+ erlang:integer_to_list(Integer);
+encode(float, Float, _Config) ->
+ io_lib:format("~p", [Float]).
+
+
+space(Config) ->
+ case Config#config.space of
+ 0 -> <<>>
+ ; X when X > 0 -> binary:copy(?space, X)
+ end.
+
+
+indent(Config) ->
+ case Config#config.indent of
+ 0 -> <<>>
+ ; X when X > 0 -> <<?newline/binary, (binary:copy(?space, X * Config#config.depth))/binary>>
+ end.
+
+
+indent_or_space(Config) ->
+ case Config#config.indent > 0 of
+ true -> indent(Config)
+ ; false -> space(Config)
+ end.
+
+
+%% internal state is a stack and a config object
+%% `{Stack, Config}`
+%% the stack is a list of in progress objects/arrays
+%% `[Current, Parent, Grandparent,...OriginalAncestor]`
+%% an object has the representation on the stack of
+%% `{object, Object}`
+%% or if there's a key with a yet to be matched value
+%% `{object, Key, Object}`
+%% an array looks like
+%% `{array, Array}`
+%% `Object` and `Array` are iodata: utf8 encoded binaries, possibly nested in lists
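+%% e.g. (illustrative sketch): while encoding `{"a":[true]}` the stack evolves roughly as
+%%   [{object, <<"{">>}] -> [{object, Key, <<"{">>}] -> [{array, <<"[">>}, {object, Key, <<"{">>}]
+%% and collapses back into a single iolist as the closing events arrive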
+
+start_json() -> {[], #config{}}.
+
+start_json(Config) when is_list(Config) -> {[], parse_config(Config)}.
+
+%% allocate a new object on top of the stack
+start_object({Stack, Config = #config{depth = Depth}}) ->
+ {[{object, ?start_object}] ++ Stack, Config#config{depth = Depth + 1}}.
+
+%% allocate a new array on top of the stack
+start_array({Stack, Config = #config{depth = Depth}}) ->
+ {[{array, ?start_array}] ++ Stack, Config#config{depth = Depth + 1}}.
+
+%% finish an object or array and insert it into the parent object if it exists
+finish({Stack, Config = #config{depth = Depth}}) ->
+ NewConfig = Config#config{depth = Depth - 1},
+ finish_({Stack, NewConfig}).
+
+finish_({[{object, <<"{">>}], Config}) -> {<<"{}">>, Config};
+finish_({[{array, <<"[">>}], Config}) -> {<<"[]">>, Config};
+finish_({[{object, <<"{">>}|Rest], Config}) -> insert(<<"{}">>, {Rest, Config});
+finish_({[{array, <<"[">>}|Rest], Config}) -> insert(<<"[]">>, {Rest, Config});
+finish_({[{object, Object}], Config}) ->
+ {[Object, indent(Config), ?end_object], Config};
+finish_({[{object, Object}|Rest], Config}) ->
+ insert([Object, indent(Config), ?end_object], {Rest, Config});
+finish_({[{array, Array}], Config}) ->
+ {[Array, indent(Config), ?end_array], Config};
+finish_({[{array, Array}|Rest], Config}) ->
+ insert([Array, indent(Config), ?end_array], {Rest, Config});
+finish_(_) -> erlang:error(badarg).
+
+%% insert a value when there's no parent object or array
+insert(Value, {[], Config}) ->
+ {Value, Config};
+%% insert a key or value into an object or array, autodetects the 'right' thing
+insert(Key, {[{object, Object}|Rest], Config}) ->
+ {[{object, Key, Object}] ++ Rest, Config};
+insert(Value, {[{object, Key, ?start_object}|Rest], Config}) ->
+ {
+ [{object, [
+ ?start_object,
+ indent(Config),
+ Key,
+ ?colon,
+ space(Config),
+ Value
+ ]}] ++ Rest,
+ Config
+ };
+insert(Value, {[{object, Key, Object}|Rest], Config}) ->
+ {
+ [{object, [
+ Object,
+ ?comma,
+ indent_or_space(Config),
+ Key,
+ ?colon,
+ space(Config),
+ Value
+ ]}] ++ Rest,
+ Config
+ };
+insert(Value, {[{array, ?start_array}|Rest], Config}) ->
+ {[{array, [?start_array, indent(Config), Value]}] ++ Rest, Config};
+insert(Value, {[{array, Array}|Rest], Config}) ->
+ {
+ [{array, [Array,
+ ?comma,
+ indent_or_space(Config),
+ Value
+ ]}] ++ Rest,
+ Config
+ };
+insert(_, _) -> erlang:error(badarg).
+
+
+get_key({[{object, Key, _}|_], _}) -> Key;
+get_key(_) -> erlang:error(badarg).
+
+
+get_value({Value, _Config}) ->
+ try unicode:characters_to_binary(Value)
+ catch error:_ -> erlang:error(badarg)
+ end;
+get_value(_) -> erlang:error(badarg).
+
+
+
+%% eunit tests
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+config_test_() ->
+ [
+ {"empty config", ?_assertEqual(#config{}, parse_config([]))},
+ {"unspecified indent/space", ?_assertEqual(
+ #config{space=1, indent=1},
+ parse_config([space, indent])
+ )},
+ {"specific indent", ?_assertEqual(
+ #config{indent=4},
+ parse_config([{indent, 4}])
+ )},
+ {"specific space", ?_assertEqual(
+ #config{space=2},
+ parse_config([{space, 2}])
+ )},
+ {"specific space and indent", ?_assertEqual(
+ #config{space=2, indent=2},
+ parse_config([{space, 2}, {indent, 2}])
+ )},
+ {"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
+ {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
+ ].
+
+
+space_test_() ->
+ [
+ {"no space", ?_assertEqual(<<>>, space(#config{space=0}))},
+ {"one space", ?_assertEqual(<<" ">>, space(#config{space=1}))},
+ {"four spaces", ?_assertEqual(<<" ">>, space(#config{space=4}))}
+ ].
+
+
+indent_test_() ->
+ [
+ {"no indent", ?_assertEqual(<<>>, indent(#config{indent=0, depth=1}))},
+ {"indent 1 depth 1", ?_assertEqual(
+ <<?newline/binary, <<" ">>/binary>>,
+ indent(#config{indent=1, depth=1})
+ )},
+ {"indent 1 depth 2", ?_assertEqual(
+ <<?newline/binary, <<" ">>/binary>>,
+ indent(#config{indent=1, depth=2})
+ )},
+ {"indent 4 depth 1", ?_assertEqual(
+ <<?newline/binary, <<" ">>/binary>>,
+ indent(#config{indent=4, depth=1})
+ )},
+ {"indent 4 depth 2", ?_assertEqual(
+ <<?newline/binary, <<" ">>/binary, <<" ">>/binary>>,
+ indent(#config{indent=4, depth=2})
+ )}
+ ].
+
+
+indent_or_space_test_() ->
+ [
+ {"no indent so space", ?_assertEqual(
+ <<" ">>,
+ indent_or_space(#config{space=1, indent=0, depth=1})
+ )},
+ {"indent so no space", ?_assertEqual(
+ <<?newline/binary, <<" ">>/binary>>,
+ indent_or_space(#config{space=1, indent=1, depth=1})
+ )}
+ ].
+
+
+encode_test_() ->
+ [
+ {"0.0", ?_assert(encode(float, 0.0, #config{}) =:= ["0.0"])},
+ {"1.0", ?_assert(encode(float, 1.0, #config{}) =:= ["1.0"])},
+ {"-1.0", ?_assert(encode(float, -1.0, #config{}) =:= ["-1.0"])},
+ {"3.1234567890987654321",
+ ?_assert(
+ encode(float, 3.1234567890987654321, #config{}) =:= ["3.1234567890987655"])
+ },
+ {"1.0e23", ?_assert(encode(float, 1.0e23, #config{}) =:= ["1.0e23"])},
+ {"0.3", ?_assert(encode(float, 3.0/10.0, #config{}) =:= ["0.3"])},
+ {"0.0001", ?_assert(encode(float, 0.0001, #config{}) =:= ["0.0001"])},
+ {"0.00001", ?_assert(encode(float, 0.00001, #config{}) =:= ["1.0e-5"])},
+ {"0.00000001", ?_assert(encode(float, 0.00000001, #config{}) =:= ["1.0e-8"])},
+ {"1.0e-323", ?_assert(encode(float, 1.0e-323, #config{}) =:= ["1.0e-323"])},
+ {"1.0e308", ?_assert(encode(float, 1.0e308, #config{}) =:= ["1.0e308"])},
+ {"min normalized float",
+ ?_assert(
+ encode(float, math:pow(2, -1022), #config{}) =:= ["2.2250738585072014e-308"]
+ )
+ },
+ {"max normalized float",
+ ?_assert(
+ encode(float, (2 - math:pow(2, -52)) * math:pow(2, 1023), #config{})
+ =:= ["1.7976931348623157e308"]
+ )
+ },
+ {"min denormalized float",
+ ?_assert(encode(float, math:pow(2, -1074), #config{}) =:= ["5.0e-324"])
+ },
+ {"max denormalized float",
+ ?_assert(
+ encode(float, (1 - math:pow(2, -52)) * math:pow(2, -1022), #config{})
+ =:= ["2.225073858507201e-308"]
+ )
+ },
+ {"hello world", ?_assert(encode(string, <<"hello world">>, #config{})
+ =:= [<<"\"">>, <<"hello world">>, <<"\"">>]
+ )},
+ {"key", ?_assert(encode(key, <<"key">>, #config{}) =:= [<<"\"">>, <<"key">>, <<"\"">>])},
+ {"1", ?_assert(encode(integer, 1, #config{}) =:= "1")},
+ {"-1", ?_assert(encode(integer, -1, #config{}) =:= "-1")},
+ {"true", ?_assert(encode(literal, true, #config{}) =:= "true")},
+ {"false", ?_assert(encode(literal, false, #config{}) =:= "false")},
+ {"null", ?_assert(encode(literal, null, #config{}) =:= "null")}
+ ].
+
+
+format_test_() ->
+ % {minified version, pretty version}
+ Cases = [
+ {"empty object", <<"{}">>, <<"{}">>},
+ {"empty array", <<"[]">>, <<"[]">>},
+ {"single key object", <<"{\"k\":\"v\"}">>, <<"{\n \"k\": \"v\"\n}">>},
+ {"single member array", <<"[true]">>, <<"[\n true\n]">>},
+ {"multiple key object",
+ <<"{\"k\":\"v\",\"x\":\"y\"}">>,
+ <<"{\n \"k\": \"v\",\n \"x\": \"y\"\n}">>
+ },
+ {"multiple member array",
+ <<"[1.0,2.0,3.0]">>,
+ <<"[\n 1.0,\n 2.0,\n 3.0\n]">>
+ },
+ {"nested structure",
+ <<"[[{},[],true],{\"k\":\"v\",\"x\":\"y\"}]">>,
+ <<"[\n [\n {},\n [],\n true\n ],\n {\n \"k\": \"v\",\n \"x\": \"y\"\n }\n]">>
+ }
+ ],
+ [{Title, ?_assertEqual(Min, jsx:minify(Pretty))} || {Title, Min, Pretty} <- Cases] ++
+ [{Title, ?_assertEqual(Pretty, jsx:prettify(Min))} || {Title, Min, Pretty} <- Cases].
+
+
+handle_event_test_() ->
+ Data = jsx:test_cases() ++ jsx:special_test_cases(),
+ [
+ {
+ Title, ?_assertEqual(
+ JSON,
+ lists:foldl(fun handle_event/2, init([]), Events ++ [end_json])
+ )
+ } || {Title, JSON, _, Events} <- Data
+ ].
+
+
+-endif.
diff --git a/deps/jsx/src/jsx_to_term.erl b/deps/jsx/src/jsx_to_term.erl
new file mode 100644
index 0000000..ec9ab89
--- /dev/null
+++ b/deps/jsx/src/jsx_to_term.erl
@@ -0,0 +1,455 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_to_term).
+
+-export([to_term/2]).
+-export([init/1, handle_event/2]).
+-export([
+ start_term/1,
+ start_object/1,
+ start_array/1,
+ finish/1,
+ insert/2,
+ get_key/1,
+ get_value/1
+]).
+
+
+-record(config, {
+ labels = binary,
+ return_maps = false
+}).
+
+-type config() :: list().
+-export_type([config/0]).
+
+-ifndef(maps_support).
+-type json_value() :: list(json_value())
+ | list({binary() | atom(), json_value()})
+ | true
+ | false
+ | null
+ | integer()
+ | float()
+ | binary().
+-endif.
+
+-ifdef(maps_support).
+-type json_value() :: list(json_value())
+ | list({binary() | atom(), json_value()})
+ | map()
+ | true
+ | false
+ | null
+ | integer()
+ | float()
+ | binary().
+-endif.
+
+
+-spec to_term(Source::binary(), Config::config()) -> json_value().
+
+-ifdef(maps_always).
+to_term(Source, Config) when is_list(Config) ->
+ (jsx:decoder(?MODULE, [return_maps] ++ Config, jsx_config:extract_config(Config)))(Source).
+-endif.
+-ifndef(maps_always).
+to_term(Source, Config) when is_list(Config) ->
+ (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source).
+-endif.
+
+parse_config(Config) -> parse_config(Config, #config{}).
+
+parse_config([{labels, Val}|Rest], Config)
+ when Val == binary; Val == atom; Val == existing_atom; Val == attempt_atom ->
+ parse_config(Rest, Config#config{labels = Val});
+parse_config([labels|Rest], Config) ->
+ parse_config(Rest, Config#config{labels = binary});
+parse_config([{return_maps, Val}|Rest], Config)
+ when Val == true; Val == false ->
+ parse_config(Rest, Config#config{return_maps = true});
+parse_config([return_maps|Rest], Config) ->
+ parse_config(Rest, Config#config{return_maps = true});
+parse_config([{K, _}|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config)
+ ; false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([K|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config)
+ ; false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([], Config) ->
+ Config.
+
+
+-type state() :: {list(), #config{}}.
+-spec init(Config::proplists:proplist()) -> state().
+
+init(Config) -> start_term(Config).
+
+-spec handle_event(Event::any(), State::state()) -> state().
+
+handle_event(end_json, State) -> get_value(State);
+
+handle_event(start_object, State) -> start_object(State);
+handle_event(end_object, State) -> finish(State);
+
+handle_event(start_array, State) -> start_array(State);
+handle_event(end_array, State) -> finish(State);
+
+handle_event({key, Key}, {_, Config} = State) -> insert(format_key(Key, Config), State);
+
+handle_event({_, Event}, State) -> insert(Event, State).
+
+
+format_key(Key, Config) ->
+ case Config#config.labels of
+ binary -> Key
+ ; atom -> binary_to_atom(Key, utf8)
+ ; existing_atom -> binary_to_existing_atom(Key, utf8)
+ ; attempt_atom ->
+ try binary_to_existing_atom(Key, utf8) of
+ Result -> Result
+ catch
+ error:badarg -> Key
+ end
+ end.
+
+
+%% internal state is a stack and a config object
+%% `{Stack, Config}`
+%% the stack is a list of in progress objects/arrays
+%% `[Current, Parent, Grandparent,...OriginalAncestor]`
+%% an object has the representation on the stack of
+%% `{object, [
+%% {NthKey, NthValue},
+%% {NMinus1Key, NthMinus1Value},
+%% ...,
+%% {FirstKey, FirstValue}
+%% ]}`
+%% or if returning maps
+%% `{object, #{
+%% FirstKey => FirstValue,
+%% SecondKey => SecondValue,
+%% ...,
+%% NthKey => NthValue
+%% }}`
+%% or if there's a key with a yet to be matched value
+%% `{object, Key, ...}`
+%% an array looks like
+%% `{array, [NthValue, NthMinus1Value,...FirstValue]}`
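+%% e.g. (illustrative): decoding `{"a": [1]}` passes through the stack
+%%   [{array, [1]}, {object, <<"a">>, []}]
+%% and finishes as `[{<<"a">>, [1]}]`, or `#{<<"a">> => [1]}` when `return_maps` is set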
+
+start_term(Config) when is_list(Config) -> {[], parse_config(Config)}.
+
+
+-ifndef(maps_support).
+%% allocate a new object on top of the stack
+start_object({Stack, Config}) -> {[{object, []}] ++ Stack, Config}.
+
+
+%% allocate a new array on top of the stack
+start_array({Stack, Config}) -> {[{array, []}] ++ Stack, Config}.
+
+
+%% finish an object or array and insert it into the parent object if it exists or
+%% return it if it is the root object
+finish({[{object, []}], Config}) -> {[{}], Config};
+finish({[{object, []}|Rest], Config}) -> insert([{}], {Rest, Config});
+finish({[{object, Pairs}], Config}) -> {lists:reverse(Pairs), Config};
+finish({[{object, Pairs}|Rest], Config}) -> insert(lists:reverse(Pairs), {Rest, Config});
+finish({[{array, Values}], Config}) -> {lists:reverse(Values), Config};
+finish({[{array, Values}|Rest], Config}) -> insert(lists:reverse(Values), {Rest, Config});
+finish(_) -> erlang:error(badarg).
+
+
+%% insert a value when there's no parent object or array
+insert(Value, {[], Config}) -> {Value, Config};
+%% insert a key or value into an object or array, autodetects the 'right' thing
+insert(Key, {[{object, Pairs}|Rest], Config}) ->
+ {[{object, Key, Pairs}] ++ Rest, Config};
+insert(Value, {[{object, Key, Pairs}|Rest], Config}) ->
+ {[{object, [{Key, Value}] ++ Pairs}] ++ Rest, Config};
+insert(Value, {[{array, Values}|Rest], Config}) ->
+ {[{array, [Value] ++ Values}] ++ Rest, Config};
+insert(_, _) -> erlang:error(badarg).
+-endif.
+
+
+-ifdef(maps_support).
+%% allocate a new object on top of the stack
+start_object({Stack, Config=#config{return_maps=true}}) ->
+ {[{object, #{}}] ++ Stack, Config};
+start_object({Stack, Config}) ->
+ {[{object, []}] ++ Stack, Config}.
+
+
+%% allocate a new array on top of the stack
+start_array({Stack, Config}) -> {[{array, []}] ++ Stack, Config}.
+
+
+%% finish an object or array and insert it into the parent object if it exists or
+%% return it if it is the root object
+finish({[{object, Map}], Config=#config{return_maps=true}}) -> {Map, Config};
+finish({[{object, Map}|Rest], Config=#config{return_maps=true}}) -> insert(Map, {Rest, Config});
+finish({[{object, []}], Config}) -> {[{}], Config};
+finish({[{object, []}|Rest], Config}) -> insert([{}], {Rest, Config});
+finish({[{object, Pairs}], Config}) -> {lists:reverse(Pairs), Config};
+finish({[{object, Pairs}|Rest], Config}) -> insert(lists:reverse(Pairs), {Rest, Config});
+finish({[{array, Values}], Config}) -> {lists:reverse(Values), Config};
+finish({[{array, Values}|Rest], Config}) -> insert(lists:reverse(Values), {Rest, Config});
+finish(_) -> erlang:error(badarg).
+
+
+%% insert a value when there's no parent object or array
+insert(Value, {[], Config}) -> {Value, Config};
+%% insert a key or value into an object or array, autodetects the 'right' thing
+insert(Key, {[{object, Map}|Rest], Config=#config{return_maps=true}}) ->
+ {[{object, Key, Map}] ++ Rest, Config};
+insert(Key, {[{object, Pairs}|Rest], Config}) ->
+ {[{object, Key, Pairs}] ++ Rest, Config};
+insert(Value, {[{object, Key, Map}|Rest], Config=#config{return_maps=true}}) ->
+ {[{object, maps:put(Key, Value, Map)}] ++ Rest, Config};
+insert(Value, {[{object, Key, Pairs}|Rest], Config}) ->
+ {[{object, [{Key, Value}] ++ Pairs}] ++ Rest, Config};
+insert(Value, {[{array, Values}|Rest], Config}) ->
+ {[{array, [Value] ++ Values}] ++ Rest, Config};
+insert(_, _) -> erlang:error(badarg).
+-endif.
+
+
+get_key({[{object, Key, _}|_], _}) -> Key;
+get_key(_) -> erlang:error(badarg).
+
+
+get_value({Value, _Config}) -> Value;
+get_value(_) -> erlang:error(badarg).
+
+
+
+%% eunit tests
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+config_test_() ->
+ [
+ {"empty config", ?_assertEqual(#config{}, parse_config([]))},
+ {"implicit binary labels", ?_assertEqual(#config{}, parse_config([labels]))},
+ {"binary labels", ?_assertEqual(#config{}, parse_config([{labels, binary}]))},
+ {"atom labels", ?_assertEqual(#config{labels=atom}, parse_config([{labels, atom}]))},
+ {"existing atom labels", ?_assertEqual(
+ #config{labels=existing_atom},
+ parse_config([{labels, existing_atom}])
+ )},
+ {"return_maps true", ?_assertEqual(
+ #config{return_maps=true},
+ parse_config([return_maps])
+ )},
+ {"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
+ {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
+ ].
+
+
+format_key_test_() ->
+ [
+ {"binary key", ?_assertEqual(<<"key">>, format_key(<<"key">>, #config{labels=binary}))},
+ {"atom key", ?_assertEqual(key, format_key(<<"key">>, #config{labels=atom}))},
+ {"existing atom key", ?_assertEqual(
+ key,
+ format_key(<<"key">>, #config{labels=existing_atom})
+ )},
+ {"nonexisting atom key", ?_assertError(
+ badarg,
+ format_key(<<"nonexistentatom">>, #config{labels=existing_atom})
+ )},
+ {"sloppy existing atom key", ?_assertEqual(
+ key,
+ format_key(<<"key">>, #config{labels=attempt_atom})
+ )},
+ {"nonexisting atom key", ?_assertEqual(
+ <<"nonexistentatom">>,
+ format_key(<<"nonexistentatom">>, #config{labels=attempt_atom})
+ )}
+ ].
+
+
+rep_manipulation_test_() ->
+ [
+ {"allocate a new context with option", ?_assertEqual(
+ {[], #config{labels=atom}},
+ start_term([{labels, atom}])
+ )},
+ {"allocate a new object on an empty stack", ?_assertEqual(
+ {[{object, []}], #config{}},
+ start_object({[], #config{}})
+ )},
+ {"allocate a new object on a stack", ?_assertEqual(
+ {[{object, []}, {object, []}], #config{}},
+ start_object({[{object, []}], #config{}})
+ )},
+ {"allocate a new array on an empty stack", ?_assertEqual(
+ {[{array, []}], #config{}},
+ start_array({[], #config{}})
+ )},
+ {"allocate a new array on a stack", ?_assertEqual(
+ {[{array, []}, {object, []}], #config{}},
+ start_array({[{object, []}], #config{}})
+ )},
+ {"insert a key into an object", ?_assertEqual(
+ {[{object, key, []}, junk], #config{}},
+ insert(key, {[{object, []}, junk], #config{}})
+ )},
+ {"get current key", ?_assertEqual(
+ key,
+ get_key({[{object, key, []}], #config{}})
+ )},
+ {"try to get non-key from object", ?_assertError(
+ badarg,
+ get_key({[{object, []}], #config{}})
+ )},
+ {"try to get key from array", ?_assertError(
+ badarg,
+ get_key({[{array, []}], #config{}})
+ )},
+ {"insert a value into an object", ?_assertEqual(
+ {[{object, [{key, value}]}, junk], #config{}},
+ insert(value, {[{object, key, []}, junk], #config{}})
+ )},
+ {"insert a value into an array", ?_assertEqual(
+ {[{array, [value]}, junk], #config{}},
+ insert(value, {[{array, []}, junk], #config{}})
+ )},
+ {"finish an object with no ancestor", ?_assertEqual(
+ {[{a, b}, {x, y}], #config{}},
+ finish({[{object, [{x, y}, {a, b}]}], #config{}})
+ )},
+ {"finish an empty object", ?_assertEqual(
+ {[{}], #config{}},
+ finish({[{object, []}], #config{}})
+ )},
+ {"finish an object with an ancestor", ?_assertEqual(
+ {[{object, [{key, [{a, b}, {x, y}]}, {foo, bar}]}], #config{}},
+ finish({[{object, [{x, y}, {a, b}]}, {object, key, [{foo, bar}]}], #config{}})
+ )},
+ {"finish an array with no ancestor", ?_assertEqual(
+ {[a, b, c], #config{}},
+ finish({[{array, [c, b, a]}], #config{}})
+ )},
+ {"finish an array with an ancestor", ?_assertEqual(
+ {[{array, [[a, b, c], d, e, f]}], #config{}},
+ finish({[{array, [c, b, a]}, {array, [d, e, f]}], #config{}})
+ )}
+ ].
+
+
+-ifdef(maps_support).
+rep_manipulation_with_maps_test_() ->
+ [
+ {"allocate a new object on an empty stack", ?_assertEqual(
+ {[{object, #{}}], #config{return_maps=true}},
+ start_object({[], #config{return_maps=true}})
+ )},
+ {"allocate a new object on a stack", ?_assertEqual(
+ {[{object, #{}}, {object, #{}}], #config{return_maps=true}},
+ start_object({[{object, #{}}], #config{return_maps=true}})
+ )},
+ {"insert a key into an object", ?_assertEqual(
+ {[{object, key, #{}}, junk], #config{return_maps=true}},
+ insert(key, {[{object, #{}}, junk], #config{return_maps=true}})
+ )},
+ {"get current key", ?_assertEqual(
+ key,
+ get_key({[{object, key, #{}}], #config{return_maps=true}})
+ )},
+ {"try to get non-key from object", ?_assertError(
+ badarg,
+ get_key({[{object, #{}}], #config{return_maps=true}})
+ )},
+ {"insert a value into an object", ?_assertEqual(
+ {[{object, #{key => value}}, junk], #config{return_maps=true}},
+ insert(value, {[{object, key, #{}}, junk], #config{return_maps=true}})
+ )},
+ {"finish an object with no ancestor", ?_assertEqual(
+ {#{a => b, x => y}, #config{return_maps=true}},
+ finish({[{object, #{x => y, a => b}}], #config{return_maps=true}})
+ )},
+ {"finish an empty object", ?_assertEqual(
+ {#{}, #config{return_maps=true}},
+ finish({[{object, #{}}], #config{return_maps=true}})
+ )},
+ {"finish an object with an ancestor", ?_assertEqual(
+ {
+ [{object, #{key => #{a => b, x => y}, foo => bar}}],
+ #config{return_maps=true}
+ },
+ finish({
+ [{object, #{x => y, a => b}}, {object, key, #{foo => bar}}],
+ #config{return_maps=true}
+ })
+ )}
+ ].
+
+
+return_maps_test_() ->
+ [
+ {"an empty map", ?_assertEqual(
+ #{},
+ jsx:decode(<<"{}">>, [return_maps])
+ )},
+ {"an empty map", ?_assertEqual(
+ [{}],
+ jsx:decode(<<"{}">>, [])
+ )},
+ {"a small map", ?_assertEqual(
+ #{<<"awesome">> => true, <<"library">> => <<"jsx">>},
+ jsx:decode(<<"{\"library\": \"jsx\", \"awesome\": true}">>, [return_maps])
+ )},
+ {"a recursive map", ?_assertEqual(
+ #{<<"key">> => #{<<"key">> => true}},
+ jsx:decode(<<"{\"key\": {\"key\": true}}">>, [return_maps])
+ )},
+ {"a map inside a list", ?_assertEqual(
+ [#{}],
+ jsx:decode(<<"[{}]">>, [return_maps])
+ )}
+ ].
+-endif.
+
+
+handle_event_test_() ->
+ Data = jsx:test_cases(),
+ [
+ {
+ Title, ?_assertEqual(
+ Term,
+ lists:foldl(fun handle_event/2, init([]), Events ++ [end_json])
+ )
+ } || {Title, _, Term, Events} <- Data
+ ].
+
+
+-endif.
diff --git a/deps/jsx/src/jsx_verify.erl b/deps/jsx/src/jsx_verify.erl
new file mode 100644
index 0000000..2073b99
--- /dev/null
+++ b/deps/jsx/src/jsx_verify.erl
@@ -0,0 +1,119 @@
+%% The MIT License
+
+%% Copyright (c) 2010-2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
+
+%% Permission is hereby granted, free of charge, to any person obtaining a copy
+%% of this software and associated documentation files (the "Software"), to deal
+%% in the Software without restriction, including without limitation the rights
+%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+%% copies of the Software, and to permit persons to whom the Software is
+%% furnished to do so, subject to the following conditions:
+
+%% The above copyright notice and this permission notice shall be included in
+%% all copies or substantial portions of the Software.
+
+%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+%% THE SOFTWARE.
+
+
+-module(jsx_verify).
+
+-export([is_json/2, is_term/2]).
+-export([init/1, handle_event/2]).
+
+
+-spec is_json(Source::binary(), Config::jsx_config:config()) -> true | false | {incomplete, jsx:decoder()}.
+
+is_json(Source, Config) when is_list(Config) ->
+ try (jsx:decoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source)
+ catch error:badarg -> false
+ end.
+
+
+-spec is_term(Source::any(), Config::jsx_config:config()) -> true | false | {incomplete, jsx:encoder()}.
+
+is_term(Source, Config) when is_list(Config) ->
+ try (jsx:encoder(?MODULE, Config, jsx_config:extract_config(Config)))(Source)
+ catch error:badarg -> false
+ end.
+
+
+parse_config(Config) -> parse_config(Config, []).
+
+%% ignore deprecated flags
+parse_config([no_repeated_keys|Rest], Config) ->
+ parse_config(Rest, Config);
+parse_config([{repeated_keys, Val}|Rest], Config) when Val == true; Val == false ->
+ parse_config(Rest, Config);
+parse_config([repeated_keys|Rest], Config) ->
+ parse_config(Rest, Config);
+parse_config([{K, _}|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config);
+ false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([K|Rest] = Options, Config) ->
+ case lists:member(K, jsx_config:valid_flags()) of
+ true -> parse_config(Rest, Config);
+ false -> erlang:error(badarg, [Options, Config])
+ end;
+parse_config([], Config) ->
+ Config.
+
+
+%% we don't actually need any state for this
+-type state() :: [].
+-spec init(Config::proplists:proplist()) -> state().
+
+init(Config) -> parse_config(Config).
+
+
+-spec handle_event(Event::any(), State::state()) -> state().
+
+handle_event(end_json, _) -> true;
+
+handle_event(_, State) -> State.
+
+
+
+%% eunit tests
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+
+config_test_() ->
+ [
+ {"empty config", ?_assertEqual([], parse_config([]))},
+ {"no repeat keys", ?_assertEqual([], parse_config([no_repeated_keys]))},
+ {"bare repeated keys", ?_assertEqual([], parse_config([repeated_keys]))},
+ {"repeated keys true", ?_assertEqual(
+ [],
+ parse_config([{repeated_keys, true}])
+ )},
+ {"repeated keys false", ?_assertEqual(
+ [],
+ parse_config([{repeated_keys, false}])
+ )},
+ {"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
+ {"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
+ ].
+
+
+handle_event_test_() ->
+ Data = jsx:test_cases() ++ jsx:special_test_cases(),
+ [
+ {
+ Title, ?_assertEqual(
+ true,
+ lists:foldl(fun handle_event/2, [], Events ++ [end_json])
+ )
+ } || {Title, _, _, Events} <- Data
+ ].
+
+
+-endif.
diff --git a/deps/msgpack/.gitignore b/deps/msgpack/.gitignore
new file mode 100644
index 0000000..28a2b9b
--- /dev/null
+++ b/deps/msgpack/.gitignore
@@ -0,0 +1,6 @@
+.eunit
+ebin/*.app
+*.beam
+*~
+deps
+*.so
\ No newline at end of file
diff --git a/deps/msgpack/.travis.yml b/deps/msgpack/.travis.yml
new file mode 100644
index 0000000..b01a28b
--- /dev/null
+++ b/deps/msgpack/.travis.yml
@@ -0,0 +1,18 @@
+language: erlang
+notifications:
+ email: false
+otp_release:
+ - 18.0
+ - 17.5
+ - 17.4
+ - 17.3
+ - 17.1
+ - 17.0
+ - R16B03
+ - R16B02
+ - R16B01
+ - R16B
+ - R15B03
+ - R15B02
+ - R15B01
+ - R15B
diff --git a/deps/msgpack/AUTHORS b/deps/msgpack/AUTHORS
new file mode 100644
index 0000000..84e64a9
--- /dev/null
+++ b/deps/msgpack/AUTHORS
@@ -0,0 +1,7 @@
+Kota Uenishi, @kuenishi
+Yuki Ito, @mururu
+Heinz N. Gies, @Licenser
+Andrew Kirilenko, @iced
+Brett Bethke, @bb4242
+Mike Watters, @zerth
+@silverbuddy
diff --git a/deps/msgpack/LICENSE-2.0.txt b/deps/msgpack/LICENSE-2.0.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/deps/msgpack/LICENSE-2.0.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/deps/msgpack/Makefile b/deps/msgpack/Makefile
new file mode 100644
index 0000000..da20abf
--- /dev/null
+++ b/deps/msgpack/Makefile
@@ -0,0 +1,61 @@
+.PHONY: compile xref eunit clean doc check make deps test
+
+REBAR=./rebar
+
+all: compile
+
+# aliases for common typos of "make"
+m: all
+ma: all
+mak: all
+make: all
+
+console: compile
+ @erl -pa ebin
+deps:
+ @$(REBAR) update-deps get-deps
+
+compile:
+ @$(REBAR) compile
+
+xref: compile
+ @$(REBAR) xref
+
+test: compile xref
+ @./rebar eunit
+
+clean:
+ @$(REBAR) clean
+
+doc:
+ @$(REBAR) doc
+
+APPS = kernel stdlib runtime_tools
+COMBO_PLT = $(HOME)/.msgpack_dialyzer_plt
+
+check_plt: xref
+ dialyzer --check_plt --plt $(COMBO_PLT) --apps $(APPS)
+
+build_plt: xref
+ dialyzer --build_plt --output_plt $(COMBO_PLT) --apps $(APPS)
+
+dialyzer: xref
+ @echo
+ @echo Use "'make check_plt'" to check PLT prior to using this target.
+ @echo Use "'make build_plt'" to build PLT prior to using this target.
+ @echo
+ @sleep 1
+ dialyzer -Wno_return --plt $(COMBO_PLT) ebin | fgrep -v -f ./dialyzer.ignore-warnings
+
+
+
+check: compile xref
+# @echo "you need $(REBAR) build-plt before make check"
+# @$(REBAR) build-plt
+ dialyzer --check
+# @$(REBAR) check-plt
+# @$(REBAR) dialyze
+
+crosslang:
+ @echo "set ERL_LIBS=../ before running 'make crosslang', or it will fail"
+ cd test && make crosslang
diff --git a/deps/msgpack/README.md b/deps/msgpack/README.md
new file mode 100644
index 0000000..1cc0cbe
--- /dev/null
+++ b/deps/msgpack/README.md
@@ -0,0 +1,127 @@
+# MessagePack Erlang
+
+![Travis](https://secure.travis-ci.org/msgpack/msgpack-erlang.png)
+
+![Drone.io](https://drone.io/github.com/msgpack/msgpack-erlang/status.png)
+
+## Prerequisites for runtime
+
+[Erlang/OTP](http://erlang.org/) >= R15B -- rebar does not work with older releases.
+It is also based on [the new msgpack spec 232a0d](https://github.com/msgpack/msgpack/blob/232a0d14c6057000cc4a478f0dfbb5942ac54e9e/spec.md).
+
+## Edit rebar.config to use it in your application
+
+```erlang
+{deps, [
+ {msgpack, ".*",
+ {git, "git://github.com/msgpack/msgpack-erlang.git", "master"}}
+]}.
+```
+
+## Simple deserialization
+
+```erlang
+Ham = msgpack:pack(Spam),
+{ok, Spam} = msgpack:unpack(Ham).
+```
+
+## Stream deserialization
+
+```erlang
+{Term0, Rest0} = msgpack:unpack_stream(Binary),
+{Term1, Rest1} = msgpack:unpack_stream(Rest0),
+...
+```
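+
+As a minimal sketch (not part of the library API), a helper can drain every
+complete packet from a buffer, treating `{error, incomplete}` as "wait for more
+data"; the function name `unpack_all` below is made up for illustration:
+
+```erlang
+unpack_all(Buffer) -> unpack_all(Buffer, []).
+
+unpack_all(Buffer, Acc) ->
+    case msgpack:unpack_stream(Buffer) of
+        {error, incomplete} -> {lists:reverse(Acc), Buffer};  %% keep leftover bytes
+        {error, Reason}     -> {error, Reason};                %% corrupted input
+        {Term, Rest}        -> unpack_all(Rest, [Term | Acc])
+    end.
+```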
+
+## String type
+
+The msgpack string type is now supported:
+
+```erlang
+Opt = [{enable_str, true}],
+{ok, "埼玉"} = msgpack:unpack(msgpack:pack("埼玉", Opt), Opt).
+%% => {ok,[22524,29577]}   ("埼玉" is the character list [22524,29577])
+```
+
+There are several options for `msgpack:pack/2` and `msgpack:unpack/2`.
+See `msgpack:options()` in `msgpack.hrl`.
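+
+For example (an illustrative combination, not an exhaustive list), several
+options can be passed together; pass the same list to both calls so the round
+trip is symmetric:
+
+```erlang
+Opts = [{format, jsx}, {enable_str, true}, {allow_atom, pack}],
+Packed = msgpack:pack([{<<"lang">>, erlang}], Opts),
+{ok, [{<<"lang">>, <<"erlang">>}]} = msgpack:unpack(Packed, Opts).
+```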
+
+## Map Style
+
+Available since Erlang/OTP 17.0:
+
+```erlang
+msgpack:pack(#{ <<"key">> => <<"value">> }, [{format, map}]).
+```
+
+Or use the old jiffy/jsx styles:
+
+```erlang
+msgpack:pack({[{<<"key">>, <<"value">>}]}, [{format, jiffy}]),
+msgpack:pack([{<<"key">>, <<"value">>}], [{format, jsx}]).
+```
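+
+To illustrate (assuming OTP 17 or later so the `map` format is available), the
+same packed binary can be decoded into any of the three representations:
+
+```erlang
+Bin = msgpack:pack(#{<<"key">> => <<"value">>}, [{format, map}]),
+{ok, Map} = msgpack:unpack(Bin, [{format, map}]),
+<<"value">> = maps:get(<<"key">>, Map),
+{ok, {[{<<"key">>, <<"value">>}]}} = msgpack:unpack(Bin, [{format, jiffy}]),
+{ok, [{<<"key">>, <<"value">>}]} = msgpack:unpack(Bin, [{format, jsx}]).
+```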
+
+## Ext type
+
+msgpack-erlang now supports the ext type, so you can serialize anything
+with your own (de)serializer. This makes it possible to handle
+Erlang-native types such as `pid()` and `ref()` contained in a `tuple()`. See
+`test/msgpack_ext_example_tests.erl` for example code.
+
+```erlang
+Packer = fun({ref, Ref}, Opt) when is_reference(Ref) -> {ok, {12, term_to_binary(Ref)}} end,
+Unpacker = fun(12, Bin) -> {ok, {ref, binary_to_term(Bin)}} end,
+Ref = make_ref(),
+Opt = [{ext,{Packer,Unpacker}}],
+{ok, {ref, Ref}} = msgpack:unpack(msgpack:pack({ref, Ref}, Opt), Opt).
+```
+
+This is still an experimental feature, so feedback is welcome.
+
+## Compatibility mode
+
+To stay compatible with the [old spec](https://github.com/msgpack/msgpack/blob/master/spec-old.md):
+
+```erlang
+OldHam = msgpack:pack(Spam, [{enable_str,false}]),
+{ok, Spam} = msgpack:unpack(OldHam, [{enable_str,false}]).
+```
+
+Since 0.2.3, `enable_str` is **false by default**.
+
+## Further tests
+
+See [msgpack-erlang-tests](http://github.com/kuenishi/msgpack-erlang-tests) for further tests.
+
+## License
+
+Apache License 2.0
+
+# Release Notes
+
+## 0.3.3
+
+- Add OTP 17 series to Travis-CI tests
+- Fix wrong numbering for ext types
+- Allow packing maps even when {format,map} is not set
+- Fix Dialyzer invalid contract warning
+- Proper use of null for jiffy-style encoding/decoding
+
+## 0.3.2
+
+- Set the default style back to jiffy
+- Fix bugs around nil/null handling
+
+## 0.3.0
+
+- Support the new map type in OTP 17.0
+- jiffy-style maps will be deprecated in the near future
+- Set the default style to map
+
+## 0.2.8
+
+The 0.2 series works with OTP 17.0, R16, and R15, and with MessagePack's new
+and old formats, but it does not support the `map` type introduced in
+OTP 17.0.
+
+It also supports a JSX-compatible mode.
diff --git a/deps/msgpack/dialyzer.ignore-warnings b/deps/msgpack/dialyzer.ignore-warnings
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/deps/msgpack/dialyzer.ignore-warnings
diff --git a/deps/msgpack/include/msgpack.hrl b/deps/msgpack/include/msgpack.hrl
new file mode 100644
index 0000000..45b8d4b
--- /dev/null
+++ b/deps/msgpack/include/msgpack.hrl
@@ -0,0 +1,109 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+
+-type msgpack_map_jsx() :: [{msgpack_term(), msgpack_term()}] | [{}].
+
+-type msgpack_map_jiffy() :: {[{msgpack_term(), msgpack_term()}]}.
+
+-ifdef(without_map).
+-type msgpack_map() :: msgpack_map_jsx() | msgpack_map_jiffy().
+-else.
+-type msgpack_map() :: msgpack_map_jsx() | msgpack_map_jiffy() | map().
+-endif.
+
+-type msgpack_map_unpacker() ::
+ fun((binary(), non_neg_integer(), msgpack_option()) ->
+ {msgpack_map(), binary()} | no_return() ).
+
+%% Erlang representation of msgpack data.
+-type msgpack_term() :: [msgpack_term()] | msgpack_map() |
+ integer() | float() | boolean() | binary().
+
+%% @doc ext_packer that packs only tuples with length > 2
+-type msgpack_ext_packer() :: fun((tuple(), msgpack:options()) ->
+ {ok, {Type::byte(), Data::binary()}} |
+ {error, any()}).
+-type msgpack_ext_unpacker() ::
+ fun((byte(), binary(), msgpack:options()) ->
+ {ok, msgpack_term()} | {error, any()})
+ | fun((byte(), binary()) ->
+ {ok, msgpack_term()} | {error, any()}).
+
+-type format_type() :: jsx|jiffy|map.
+
+-type msgpack_list_options() :: [
+ {format, format_type()} |
+ jsx | jiffy |
+ {allow_atom, none|pack} |
+ {enable_str, boolean()} |
+ {ext, {msgpack_ext_packer(),msgpack_ext_unpacker()} | module()}
+ ].
+
+-ifdef(default_map).
+
+-define(DEFAULT_MAP_FORMAT, map).
+-define(DEFAULT_MAP_UNPACKER_FUN, fun msgpack_unpacker:unpack_map/3).
+
+-else.
+
+-define(DEFAULT_MAP_FORMAT, jiffy).
+-define(DEFAULT_MAP_UNPACKER_FUN, fun msgpack_unpacker:unpack_map_jiffy/3).
+
+-endif.
+
+-record(options_v1, {
+ interface = ?DEFAULT_MAP_FORMAT :: jiffy | jsx,
+ map_unpack_fun = ?DEFAULT_MAP_UNPACKER_FUN :: msgpack_map_unpacker(),
+ impl = erlang :: erlang | nif
+ }).
+
+-record(options_v2, {
+ interface = ?DEFAULT_MAP_FORMAT :: jiffy | jsx,
+ map_unpack_fun = ?DEFAULT_MAP_UNPACKER_FUN :: msgpack_map_unpacker(),
+ impl = erlang :: erlang | nif,
+ allow_atom = none :: none | pack, %% allows atom when packing
+ enable_str = false :: boolean(), %% true for new spec
+ ext_packer = undefined :: msgpack_ext_packer() | undefined,
+ ext_unpacker = undefined :: msgpack_ext_unpacker() | undefined,
+ original_list = [] :: msgpack_list_options()
+ }).
+
+-ifdef(without_map).
+
+-define(OPTION, #options_v2).
+-type msgpack_option() :: #options_v2{}.
+
+-else.
+
+-record(options_v3, {
+ interface = ?DEFAULT_MAP_FORMAT :: format_type(),
+ map_unpack_fun = ?DEFAULT_MAP_UNPACKER_FUN :: msgpack_map_unpacker(),
+ impl = erlang :: erlang | nif,
+ allow_atom = none :: none | pack, %% allows atom when packing
+ enable_str = false :: boolean(), %% true for new spec
+ ext_packer = undefined :: msgpack_ext_packer() | undefined,
+ ext_unpacker = undefined :: msgpack_ext_unpacker() | undefined,
+ original_list = [] :: msgpack_list_options()
+ }).
+
+-define(OPTION, #options_v3).
+-type msgpack_option() :: #options_v3{}.
+
+-endif.
+
diff --git a/deps/msgpack/rebar b/deps/msgpack/rebar
new file mode 100755
index 0000000..c7d9352
--- /dev/null
+++ b/deps/msgpack/rebar
Binary files differ
diff --git a/deps/msgpack/rebar.config b/deps/msgpack/rebar.config
new file mode 100644
index 0000000..2a6a002
--- /dev/null
+++ b/deps/msgpack/rebar.config
@@ -0,0 +1,26 @@
+{require_otp_vsn, "R15|R16|17|18"}.
+
+{erl_opts, [fail_on_warning, debug_info, warn_untyped_record]}.
+{xref_checks, [undefined_function_calls]}.
+{cover_enabled, true}.
+{cover_print_enabled, false}.
+{edoc_opts, [{dialyzer_specs, all}]}.
+{validate_app_modules, true}.
+
+{erl_first_files, [
+ "src/msgpack_ext.erl"
+ ]}.
+
+
+%% {port_sources, ["c_src/*.c"]}.
+%% {port_env, [
+%% %% Make sure to set -fPIC when compiling leveldb
+%% {"CFLAGS", "$CFLAGS -Wall -O3 -fPIC"},
+%% {"CXXFLAGS", "$CXXFLAGS -Wall -O3 -fPIC"},
+%% {"DRV_CFLAGS", "$DRV_CFLAGS -O3 -Wall -I c_src/msgpack-0.5.7/src"},
+%% {"DRV_LDFLAGS", "$DRV_LDFLAGS c_src/msgpack-0.5.7/src/.libs/libmsgpack.a"}
+%% ]}.
+
+%% {pre_hooks, [{compile, "sh c_src/build.sh"}]}.
+
+%% {post_hooks, [{clean, "rm -rf c_src/msgpack-0.5.7"}]}.
diff --git a/deps/msgpack/rebar.config.script b/deps/msgpack/rebar.config.script
new file mode 100644
index 0000000..86ae113
--- /dev/null
+++ b/deps/msgpack/rebar.config.script
@@ -0,0 +1,25 @@
+case erlang:system_info(otp_release) of
+
+ %% Rxx, before R16
+ [$R|_] ->
+ HashDefine = [{d,without_map}],
+ case lists:keysearch(erl_opts, 1, CONFIG) of
+ {value, {erl_opts, Opts}} ->
+ lists:keyreplace(erl_opts,1,CONFIG,{erl_opts,Opts++HashDefine});
+ false ->
+ CONFIG ++ [{erl_opts, HashDefine}]
+ end;
+
+ %% In 17, maps are experimental
+ "17" ->
+ CONFIG;
+
+ _ ->
+ HashDefine = [{d,default_map}],
+ case lists:keysearch(erl_opts, 1, CONFIG) of
+ {value, {erl_opts, Opts}} ->
+ lists:keyreplace(erl_opts,1,CONFIG,{erl_opts,Opts++HashDefine});
+ false ->
+ CONFIG ++ [{erl_opts, HashDefine}]
+ end
+end.
diff --git a/deps/msgpack/src/msgpack.app.src b/deps/msgpack/src/msgpack.app.src
new file mode 100644
index 0000000..24b07f3
--- /dev/null
+++ b/deps/msgpack/src/msgpack.app.src
@@ -0,0 +1,9 @@
+{application, msgpack,
+ [{description, "MessagePack serializer/deserializer"},
+ {vsn, git},
+ {modules,
+ [msgpack]
+ },
+ {registered, []},
+ {env, []},
+ {applications, [kernel, stdlib]}]}.
diff --git a/deps/msgpack/src/msgpack.erl b/deps/msgpack/src/msgpack.erl
new file mode 100644
index 0000000..dc6feed
--- /dev/null
+++ b/deps/msgpack/src/msgpack.erl
@@ -0,0 +1,256 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+%% @doc <a href="http://msgpack.org/">MessagePack</a> codec for Erlang.
+%%
+%% APIs are almost compatible with <a href="http://redmine.msgpack.org/projects/msgpack/wiki/QuickStartC">C API</a>
+%% except for buffering functions (both copying and zero-copying), which are unavailable.
+%%
+%% <table border="1">
+%% <caption>Equivalence between Erlang and <a href="http://msgpack.sourceforge.jp/spec">Msgpack type</a> :</caption>
+%% <tr><th> erlang </th><th> msgpack </th></tr>
+%% <tr><td> integer() </td><td> pos_fixnum/neg_fixnum/uint8/uint16/uint32/uint64/int8/int16/int32/int64 </td></tr>
+%% <tr><td> float() </td><td> float/double </td></tr>
+%% <tr><td> nil </td><td> nil </td></tr>
+%% <tr><td> boolean() </td><td> boolean </td></tr>
+%% <tr><td> binary() </td><td> fix_raw/raw16/raw32 </td></tr>
+%% <tr><td> list() </td><td> fix_array/array16/array32 </td></tr>
+%% <tr><td> {proplist()} </td><td> fix_map/map16/map32 </td></tr>
+%% <tr><td> [{term(),term()}]|[{}] </td><td> fix_map/map16/map32 </td></tr>
+%% <tr><td> map() </td><td> fix_map/map16/map32 </td></tr>
+%% </table>
+%% @end
+
+-module(msgpack).
+
+-export([pack/1, unpack/1, unpack_stream/1,
+ pack/2, unpack/2, unpack_stream/2,
+ term_to_binary/1, binary_to_term/1, binary_to_term/2
+ ]).
+
+-include("msgpack.hrl").
+
+%% for export
+-export_type([object/0, msgpack_map/0, options/0]).
+-type object() :: msgpack_term().
+-type options() :: msgpack_list_options().
+
+-spec term_to_binary(term()) -> binary().
+term_to_binary(Term) ->
+ msgpack_term:to_binary(Term).
+
+-spec binary_to_term(binary()) -> term().
+binary_to_term(Bin) ->
+ msgpack_term:from_binary(Bin, []).
+
+-spec binary_to_term(binary(), [safe]) -> term().
+binary_to_term(Bin, Opt) ->
+ msgpack_term:from_binary(Bin, Opt).
+
+%% @doc Encode an Erlang term into a msgpack binary.
+%% Returns {error, {badarg, term()}} if the input is illegal.
+-spec pack(msgpack:object()) -> binary() | {error, {badarg, term()}}.
+pack(Term) -> msgpack:pack(Term, []).
+
+-spec pack(msgpack:object(), msgpack:options()) -> binary().
+pack(Term, Opts) ->
+ Option = parse_options(Opts),
+ try
+ msgpack_packer:pack(Term, Option)
+ catch
+ throw:Exception -> {error, Exception}
+ end.
+
+%%% @doc Decode a msgpack binary into an Erlang term.
+%%% The binary must contain exactly one msgpack packet; no bytes may remain after decoding.
+%%% Returns {error, {badarg, term()}} if the input is corrupted.
+%%% Returns {error, incomplete} if the input is not a full msgpack packet (caller should gather more data and try again).
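+%%%
+%%% A minimal usage sketch (the values here are illustrative only):
+%%%   Bin = msgpack:pack([1, 2, 3]),
+%%%   {ok, [1, 2, 3]} = msgpack:unpack(Bin).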
+-spec unpack(binary()) -> {ok, msgpack:object()}
+ | {error, not_just_binary} % a term was deserialized, but bytes remain
+ | {error, incomplete} % not enough bytes to deserialize a complete term
+ | {error, {badarg, term()}}.
+unpack(Bin) -> unpack(Bin, []).
+
+-spec unpack(binary(), msgpack:options()) -> {ok, msgpack:object()} | {error, any()}.
+unpack(Bin, Opts) ->
+ case unpack_stream(Bin, Opts) of
+ {error, _} = E -> E;
+ {Term, <<>>} -> {ok, Term};
+ {_, Binary} when is_binary(Binary)
+ andalso byte_size(Binary) > 0 ->
+ {error, not_just_binary}
+ end.
+
+-spec unpack_stream(binary()) -> {msgpack:object(), binary()}
+ | {error, incomplete}
+ | {error, {badarg, term()}}.
+unpack_stream(Bin) -> unpack_stream(Bin, []).
+
+-spec unpack_stream(binary(), msgpack:options())-> {msgpack:object(), binary()}
+ | {error, incomplete}
+ | {error, {badarg, term()}}.
+unpack_stream(Bin, Opts0) when is_binary(Bin) ->
+ Opts = parse_options(Opts0),
+ try
+ msgpack_unpacker:unpack_stream(Bin, Opts)
+ catch
+ throw:Exception -> {error, Exception}
+ end;
+unpack_stream(Other, _) -> {error, {badarg, Other}}.
+
+%% @private
+-spec parse_options(msgpack:options()) -> msgpack_option().
+
+parse_options(Opt) ->
+ parse_options(Opt, ?OPTION{original_list=Opt}).
+
+%% @private
+-spec parse_options(msgpack:options(), msgpack_option()) -> msgpack_option().
+parse_options([], Opt) -> Opt;
+
+parse_options([jsx|TL], Opt0) ->
+ Opt = Opt0?OPTION{interface=jsx,
+ map_unpack_fun=msgpack_unpacker:map_unpacker(jsx)},
+ parse_options(TL, Opt);
+parse_options([jiffy|TL], Opt0) ->
+ Opt = Opt0?OPTION{interface=jiffy,
+ map_unpack_fun=msgpack_unpacker:map_unpacker(jiffy)},
+ parse_options(TL, Opt);
+parse_options([{format,Type}|TL], Opt0)
+ when Type =:= jsx; Type =:= jiffy; Type =:= map->
+ Opt = Opt0?OPTION{interface=Type,
+ map_unpack_fun=msgpack_unpacker:map_unpacker(Type)},
+ parse_options(TL, Opt);
+
+parse_options([{allow_atom,Type}|TL], Opt0) ->
+ Opt = case Type of
+ none -> Opt0?OPTION{allow_atom=none};
+ pack -> Opt0?OPTION{allow_atom=pack}
+ end,
+ parse_options(TL, Opt);
+
+parse_options([{enable_str,Bool}|TL], Opt0) ->
+ Opt = Opt0?OPTION{enable_str=Bool},
+ parse_options(TL, Opt);
+
+parse_options([{ext, Module}|TL], Opt0) when is_atom(Module) ->
+ Opt = Opt0?OPTION{ext_packer=fun Module:pack_ext/2,
+ ext_unpacker=fun Module:unpack_ext/3},
+ parse_options(TL, Opt);
+parse_options([{ext, {Packer,Unpacker}}|TL], Opt0) when
+ is_function(Packer, 2) andalso
+ (is_function(Unpacker, 3) orelse is_function(Unpacker, 2)) ->
+ Opt = Opt0?OPTION{ext_packer=Packer, ext_unpacker=Unpacker},
+ parse_options(TL, Opt).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% unit tests
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+-ifdef(default_map).
+
+test_data()->
+ [true, false, nil,
+ 0, 1, 2, 123, 512, 1230, 678908, 16#FFFFFFFFFF,
+ -1, -23, -512, -1230, -567898, -16#FFFFFFFFFF,
+ -16#80000001,
+ 123.123, -234.4355, 1.0e-34, 1.0e64,
+ [23, 234, 0.23],
+ <<"hogehoge">>, <<"243546rf7g68h798j", 0, 23, 255>>,
+ <<"hoasfdafdas][">>,
+ [0,42, <<"sum">>, [1,2]], [1,42, nil, [3]],
+ -234, -40000, -16#10000000, -16#100000000,
+ 42
+ ].
+
+-else.
+
+test_data()->
+ [true, false, null,
+ 0, 1, 2, 123, 512, 1230, 678908, 16#FFFFFFFFFF,
+ -1, -23, -512, -1230, -567898, -16#FFFFFFFFFF,
+ -16#80000001,
+ 123.123, -234.4355, 1.0e-34, 1.0e64,
+ [23, 234, 0.23],
+ <<"hogehoge">>, <<"243546rf7g68h798j", 0, 23, 255>>,
+ <<"hoasfdafdas][">>,
+ [0,42, <<"sum">>, [1,2]], [1,42, null, [3]],
+ -234, -40000, -16#10000000, -16#100000000,
+ 42
+ ].
+
+-endif.
+
+enable_str_test() ->
+ ?assertEqual(<<167:8, (<<"saitama">>)/binary >>,
+ msgpack:pack(<<"saitama">>, [{enable_str, false}])),
+ ?assertEqual(<<196,7,115,97,105,116,97,109,97>>,
+ msgpack:pack(<<"saitama">>, [{enable_str, true}])).
+
+basic_test()->
+ Tests = test_data(),
+ MatchFun0 = fun(Term) ->
+ {ok, Term} = msgpack:unpack(msgpack:pack(Term)),
+ Term
+ end,
+ %% MatchFun1 = fun(Term) ->
+ %% {ok, Term} = msgpack_nif:unpack(msgpack_nif:pack(Term)),
+ %% Term
+ %% end,
+ Tests = lists:map(MatchFun0, Tests).
+ %% Tests = lists:map(MatchFun1, Tests).
+
+test_p(Len,Term,OrigBin,Len) ->
+ {ok, Term}=msgpack:unpack(OrigBin);
+
+test_p(I,_,OrigBin,Len) when I < Len->
+ <<Bin:I/binary, _/binary>> = OrigBin,
+ ?assertEqual({error,incomplete}, msgpack:unpack(Bin)).
+
+partial_test()-> % error handling test.
+ Term = lists:seq(0, 45),
+ Bin=msgpack:pack(Term),
+ BinLen = byte_size(Bin),
+ [test_p(X, Term, Bin, BinLen) || X <- lists:seq(0,BinLen)].
+
+long_test()->
+ Longer = lists:seq(0, 655),
+ {ok, Longer} = msgpack:unpack(msgpack:pack(Longer)).
+
+
+other_test()->
+ ?assertEqual({error,incomplete},msgpack:unpack(<<>>)).
+
+error_test()->
+ ?assertEqual({error,{badarg, atom}}, msgpack:pack(atom)),
+ Term = {"hoge", "hage", atom},
+ ?assertEqual({error,{badarg, Term}}, msgpack:pack(Term)).
+
+long_binary_test()->
+ A = msgpack:pack(1),
+ B = msgpack:pack(10),
+ C = msgpack:pack(100),
+ {1, Rem0} = msgpack:unpack_stream(<<A/binary, B/binary, C/binary>>),
+ {10, Rem1} = msgpack:unpack_stream(Rem0),
+ {100, _Rem2} = msgpack:unpack_stream(Rem1),
+ ok.
+
+-endif.
diff --git a/deps/msgpack/src/msgpack_ext.erl b/deps/msgpack/src/msgpack_ext.erl
new file mode 100644
index 0000000..c448b2e
--- /dev/null
+++ b/deps/msgpack/src/msgpack_ext.erl
@@ -0,0 +1,30 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+-module(msgpack_ext).
+
+%%
+%% @doc identity constraint
+%% {ok, {Type, Data}} = msgpack_ext_module:pack_ext(Tuple, [{ext, msgpack_ext_module}]),
+%% {ok, Tuple} = msgpack_ext_module:unpack_ext(Type, Data, [{ext, msgpack_ext_module}])
+%%
+-callback pack_ext(any(), msgpack:options()) ->
+ {ok, {Type::byte(), Data::binary()}} |
+ {error, any()}.
+
+-callback unpack_ext(Type::byte(), Data::binary(), msgpack:options()) ->
+ {ok, any()} | {error, any()}.
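+
+%% A minimal sketch of a module implementing this behaviour; the module name
+%% `my_ext' and the ext type code 100 are made up for illustration:
+%%
+%%   -module(my_ext).
+%%   -behaviour(msgpack_ext).
+%%   -export([pack_ext/2, unpack_ext/3]).
+%%
+%%   pack_ext({ref, Ref}, _Opts) when is_reference(Ref) ->
+%%       {ok, {100, term_to_binary(Ref)}};
+%%   pack_ext(_Other, _Opts) ->
+%%       {error, unsupported}.
+%%
+%%   unpack_ext(100, Bin, _Opts) ->
+%%       {ok, {ref, binary_to_term(Bin)}}.
+%%
+%% Usage sketch: msgpack:pack({ref, make_ref()}, [{ext, my_ext}]).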
diff --git a/deps/msgpack/src/msgpack_packer.erl b/deps/msgpack/src/msgpack_packer.erl
new file mode 100644
index 0000000..aafae95
--- /dev/null
+++ b/deps/msgpack/src/msgpack_packer.erl
@@ -0,0 +1,394 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+-module(msgpack_packer).
+
+-export([pack/2, pack_ext/3]).
+
+-include("msgpack.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+%% pack them all
+-spec pack(msgpack:object(), msgpack_option()) -> binary().
+
+pack(I, _) when is_integer(I) andalso I < 0 ->
+ pack_int(I);
+pack(I, _) when is_integer(I) ->
+ pack_uint(I);
+pack(F, _) when is_float(F) ->
+ pack_double(F);
+pack(null, _Opt = ?OPTION{interface=jsx}) ->
+ << 16#C0:8 >>;
+pack(null, _Opt = ?OPTION{interface=jiffy}) ->
+ << 16#C0:8 >>;
+pack(nil, _Opt = ?OPTION{interface=Interface})
+ when Interface =/= jsx andalso Interface =/= jiffy ->
+ << 16#C0:8 >>;
+pack(false, _) ->
+ << 16#C2:8 >>;
+pack(true, _) ->
+ << 16#C3:8 >>;
+
+pack(Bin, Opt) when is_binary(Bin) ->
+ handle_binary(Bin, Opt);
+
+pack(Atom, ?OPTION{allow_atom=pack} = Opt) when is_atom(Atom) ->
+ pack(erlang:atom_to_binary(Atom, unicode), Opt);
+
+%% jiffy interface
+pack({Map}, Opt = ?OPTION{interface=jiffy}) when is_list(Map) ->
+ pack_map(Map, Opt);
+
+%% jsx interface
+pack(Map, Opt = ?OPTION{interface=jsx}) when Map =:= [{}]->
+ pack_map([], Opt);
+pack([{_,_}|_] = Map, Opt = ?OPTION{interface=jsx}) ->
+ pack_map(Map, Opt);
+
+pack(List, ?OPTION{enable_str=true}=Opt) when is_list(List) ->
+ try
+ case lists:all(fun is_integer/1, List) of
+ true ->
+ case pack_string(List, Opt) of
+ %% NOTE: because of Erlang's string representation, msgpack
+ %% cannot tell the difference between a string and a list of
+ %% integers. Users therefore have to take care not to
+ %% include invalid unicode characters.
+ %% Here we fall back to list(int()).
+ {error, _} -> pack_array(List, Opt);
+ Bin when is_binary(Bin) -> Bin
+ end;
+ false ->
+ pack_array(List, Opt)
+ end
+ catch error:badarg -> pack_array(List, Opt)
+ end;
+
+pack(List, Opt) when is_list(List) ->
+ pack_array(List, Opt);
+
+pack(Other, Opt) ->
+ handle_ext(Other, Opt).
+
+-ifdef(without_map).
+
+%% TODO: maybe we don't need this inside the ifdef,
+%% since ?OPTION{enable_str=boolean()} could be used instead
+handle_binary(Bin, Opt) ->
+ case Opt of
+ #options_v2{enable_str=true} = Opt -> pack_raw2(Bin);
+ #options_v2{enable_str=false} = Opt -> pack_raw(Bin);
+ #options_v1{} = Opt -> pack_raw(Bin)
+ end.
+
+%% Packing ext type with user defined packer function
+handle_ext(Any, _Opt = ?OPTION{ext_packer=Packer,
+ original_list=Orig,
+ interface=Interface})
+ when is_function(Packer) andalso Interface =/= map ->
+
+ case pack_ext(Any, Packer, Orig) of
+ {ok, Binary} -> Binary;
+ {error, E} -> throw({error, E})
+ end;
+
+handle_ext(Other, _) ->
+ throw({badarg, Other}).
+
+-else.
+
+handle_binary(Bin, Opt) ->
+ case Opt of
+ #options_v3{enable_str=true} = Opt -> pack_raw2(Bin);
+ #options_v3{enable_str=false} = Opt -> pack_raw(Bin);
+ #options_v2{enable_str=true} = Opt -> pack_raw2(Bin);
+ #options_v2{enable_str=false} = Opt -> pack_raw(Bin);
+ #options_v1{} = Opt -> pack_raw(Bin)
+ end.
+
+%% %% map interface
+handle_ext(Map, Opt) when is_map(Map) ->
+ pack_map(maps:to_list(Map), Opt);
+
+handle_ext(Any, _Opt = ?OPTION{ext_packer=Packer,
+ original_list=Orig})
+ when is_function(Packer) ->
+
+ case pack_ext(Any, Packer, Orig) of
+ {ok, Binary} -> Binary;
+ {error, E} -> throw({error, E})
+ end;
+
+handle_ext(Other, _) ->
+ throw({badarg, Other}).
+
+-endif.
+
+-spec pack_int(integer()) -> binary().
+%% negative fixnum
+pack_int(N) when N >= -32->
+ << 2#111:3, N:5 >>;
+%% int 8
+pack_int(N) when N >= -128 ->
+ << 16#D0:8, N:8/big-signed-integer-unit:1 >>;
+%% int 16
+pack_int(N) when N >= -16#8000 ->
+ << 16#D1:8, N:16/big-signed-integer-unit:1 >>;
+%% int 32
+pack_int(N) when N >= -16#80000000 ->
+ << 16#D2:8, N:32/big-signed-integer-unit:1 >>;
+%% int 64
+pack_int(N) when N >= -16#8000000000000000 ->
+ << 16#D3:8, N:64/big-signed-integer-unit:1 >>;
+%% too big int
+pack_int(N) ->
+ throw({badarg, N}).
+
+
+-spec pack_uint(non_neg_integer()) -> binary().
+%% positive fixnum
+pack_uint(N) when N < 128 ->
+ << 2#0:1, N:7 >>;
+%% uint 8
+pack_uint(N) when (N band 16#FF) =:= N ->
+ << 16#CC:8, N:8 >>;
+%% uint 16
+pack_uint(N) when (N band 16#FFFF) =:= N ->
+ << 16#CD:8, N:16/big-unsigned-integer-unit:1 >>;
+%% uint 32
+pack_uint(N) when (N band 16#FFFFFFFF) =:= N->
+ << 16#CE:8, N:32/big-unsigned-integer-unit:1 >>;
+%% uint 64
+pack_uint(N) when (N band 16#FFFFFFFFFFFFFFFF) =:= N ->
+ << 16#CF:8, N:64/big-unsigned-integer-unit:1 >>;
+%% too big unit
+pack_uint(N) ->
+ throw({badarg, N}).
+
+
+-spec pack_double(float()) -> binary().
+%% float : erlang's float is always IEEE 754 64bit format.
+%% pack_float(F) when is_float(F)->
+%% << 16#CA:8, F:32/big-float-unit:1 >>.
+%% pack_double(F).
+%% double
+pack_double(F) ->
+ << 16#CB:8, F:64/big-float-unit:1 >>.
+
+-spec pack_raw(binary()) -> binary().
+%% raw bytes in old spec
+pack_raw(Bin) ->
+ case byte_size(Bin) of
+ Len when Len < 32->
+ << 2#101:3, Len:5, Bin/binary >>;
+ Len when Len < 16#10000 -> % 65536
+ << 16#DA:8, Len:16/big-unsigned-integer-unit:1, Bin/binary >>;
+ Len when Len < 16#100000000 ->
+ << 16#DB:8, Len:32/big-unsigned-integer-unit:1, Bin/binary >>;
+ _ ->
+ throw({badarg, Bin})
+ end.
+
+-spec pack_raw2(binary()) -> binary().
+%% raw bytes in new spec
+pack_raw2(Bin) ->
+ case byte_size(Bin) of
+ Len when Len < 32->
+ << 16#C4:8, Len:8/big-unsigned-integer-unit:1, Bin/binary>>;
+ Len when Len < 16#10000 -> % 65536
+ << 16#C5:8, Len:16/big-unsigned-integer-unit:1, Bin/binary >>;
+ Len when Len < 16#100000000 ->
+ << 16#C6:8, Len:32/big-unsigned-integer-unit:1, Bin/binary >>;
+ _ ->
+ throw({badarg, Bin})
+ end.
+
+%% @doc A string MAY be unicode, or may be EUC-JP, SJIS, UTF-1024 or anything else.
+%% EVERY implementation must put the binary length just after the type indicator
+%% so that the string can be skipped if it is unreadable.
+-spec pack_string(list(), msgpack_option()) -> binary() | {error, atom()}.
+pack_string(String, _Opt) ->
+ case unicode:characters_to_binary(String) of
+ {error, _Bin, _} -> {error, broken_unicode};
+ {incomplete, _Bin, _} -> {error, incomplete_unicode};
+ Bin ->
+ case byte_size(Bin) of
+ Len when Len < 32->
+ << 2#101:3, Len:5, Bin/binary >>;
+ Len when Len < 256 ->
+ << 16#D9:8, Len:8/big-unsigned-integer-unit:1, Bin/binary >>;
+ Len when Len < 16#10000 -> % 65536
+ << 16#DA:8, Len:16/big-unsigned-integer-unit:1, Bin/binary >>;
+ Len when Len < 16#100000000 ->
+ << 16#DB:8, Len:32/big-unsigned-integer-unit:1, Bin/binary >>;
+ _ ->
+ throw({badarg, String})
+ end
+ end.
+
+-spec pack_array([msgpack:object()], msgpack_option()) -> binary() | no_return().
+pack_array([], _) ->
+ << 2#1001:4, 0:4/integer-unit:1 >>;
+
+pack_array([A], Opt) ->
+ << 2#1001:4, 1:4/integer-unit:1, (pack(A, Opt))/binary >>;
+
+pack_array([A, B], Opt) ->
+ << 2#1001:4, 2:4/integer-unit:1, (pack(A, Opt))/binary, (pack(B, Opt))/binary >>;
+
+pack_array([A, B, C], Opt) ->
+ << 2#1001:4, 3:4/integer-unit:1, (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary >>;
+
+pack_array([A, B, C, D], Opt) ->
+ << 2#1001:4, 4:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary >>;
+
+pack_array([A, B, C, D, E], Opt) ->
+ << 2#1001:4, 5:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F], Opt) ->
+ << 2#1001:4, 6:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G], Opt) ->
+ << 2#1001:4, 7:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H], Opt) ->
+ << 2#1001:4, 8:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I], Opt) ->
+ << 2#1001:4, 9:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J], Opt) ->
+ << 2#1001:4, 10:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J, K], Opt) ->
+ << 2#1001:4, 11:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary, (pack(K, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J, K, L], Opt) ->
+ << 2#1001:4, 12:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary, (pack(K, Opt))/binary, (pack(L, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J, K, L, M], Opt) ->
+ << 2#1001:4, 13:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary, (pack(K, Opt))/binary, (pack(L, Opt))/binary,
+ (pack(M, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J, K, L, M, N], Opt) ->
+ << 2#1001:4, 14:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary, (pack(K, Opt))/binary, (pack(L, Opt))/binary,
+ (pack(M, Opt))/binary, (pack(N, Opt))/binary >>;
+
+pack_array([A, B, C, D, E, F, G, H, I, J, K, L, M, N, O], Opt) ->
+ << 2#1001:4, 15:4/integer-unit:1,
+ (pack(A, Opt))/binary, (pack(B, Opt))/binary, (pack(C, Opt))/binary, (pack(D, Opt))/binary,
+ (pack(E, Opt))/binary, (pack(F, Opt))/binary, (pack(G, Opt))/binary, (pack(H, Opt))/binary,
+ (pack(I, Opt))/binary, (pack(J, Opt))/binary, (pack(K, Opt))/binary, (pack(L, Opt))/binary,
+ (pack(M, Opt))/binary, (pack(N, Opt))/binary, (pack(O, Opt))/binary >>;
+
+pack_array(L, Opt) ->
+ case length(L) of
+ Len when Len < 16#10000 ->
+ <<16#DC:8, Len:16/big-unsigned-integer-unit:1, (<< <<(pack(E, Opt))/binary>> || E <- L >>)/binary>>;
+ Len when Len < 16#100000000 ->
+ <<16#DD:8, Len:32/big-unsigned-integer-unit:1, (<< <<(pack(E, Opt))/binary>> || E <- L >>)/binary>>;
+ _ ->
+ throw({badarg, L})
+ end.
+
+-spec pack_map(msgpack:msgpack_map(), msgpack_option()) -> binary() | no_return().
+pack_map([{Ka, Va}], Opt)->
+ << 2#1000:4, 1:4/integer-unit:1,
+ (pack(Ka, Opt))/binary, (pack(Va, Opt))/binary >>;
+
+pack_map([{Ka, Va}, {Kb, Vb}], Opt)->
+ << 2#1000:4, 2:4/integer-unit:1,
+ (pack(Ka, Opt))/binary, (pack(Va, Opt))/binary,
+ (pack(Kb, Opt))/binary, (pack(Vb, Opt))/binary >>;
+
+pack_map([{Ka, Va}, {Kb, Vb}, {Kc, Vc}], Opt)->
+ << 2#1000:4, 3:4/integer-unit:1,
+ (pack(Ka, Opt))/binary, (pack(Va, Opt))/binary,
+ (pack(Kb, Opt))/binary, (pack(Vb, Opt))/binary,
+ (pack(Kc, Opt))/binary, (pack(Vc, Opt))/binary >>;
+
+pack_map([{Ka, Va}, {Kb, Vb}, {Kc, Vc}, {Kd, Vd}], Opt)->
+ << 2#1000:4, 4:4/integer-unit:1,
+ (pack(Ka, Opt))/binary, (pack(Va, Opt))/binary,
+ (pack(Kb, Opt))/binary, (pack(Vb, Opt))/binary,
+ (pack(Kc, Opt))/binary, (pack(Vc, Opt))/binary,
+ (pack(Kd, Opt))/binary, (pack(Vd, Opt))/binary >>;
+
+pack_map(M, Opt)->
+ case length(M) of
+ Len when Len < 16 ->
+ <<2#1000:4, Len:4/integer-unit:1,
+ (<< <<(pack(K, Opt))/binary, (pack(V, Opt))/binary>> || {K, V} <- M >>)/binary>>;
+ Len when Len < 16#10000 -> % 65536
+ <<16#DE:8, Len:16/big-unsigned-integer-unit:1,
+ (<< <<(pack(K, Opt))/binary, (pack(V, Opt))/binary>> || {K, V} <- M >>)/binary>>;
+ Len when Len < 16#100000000->
+ <<16#DF:8, Len:32/big-unsigned-integer-unit:1,
+ (<< <<(pack(K, Opt))/binary, (pack(V, Opt))/binary>> || {K, V} <- M >>)/binary>>;
+ _ ->
+ throw({badarg, M})
+ end.
+
+-spec pack_ext(any(), msgpack_ext_packer(), msgpack:options()) -> {ok, binary()} | {error, any()}.
+pack_ext(Any, Packer, Opt) ->
+ case Packer(Any, Opt) of
+ {ok, {Type, Data}} when -16#80 =< Type andalso Type =< 16#7F ->
+ Bin = case byte_size(Data) of
+ 1 -> <<16#D4, Type:1/signed-integer-unit:8, Data/binary>>;
+ 2 -> <<16#D5, Type:1/signed-integer-unit:8, Data/binary>>;
+ 4 -> <<16#D6, Type:1/signed-integer-unit:8, Data/binary>>;
+ 8 -> <<16#D7, Type:1/signed-integer-unit:8, Data/binary>>;
+ 16 -> <<16#D8, Type:1/signed-integer-unit:8, Data/binary>>;
+ Len when Len < 16#100 ->
+ <<16#C7, Len:8, Type:1/signed-integer-unit:8, Data/binary>>;
+ Len when Len < 16#10000 ->
+ <<16#C8, Len:16, Type:1/signed-integer-unit:8, Data/binary>>;
+ Len when Len < 16#100000000 ->
+ <<16#C9, Len:32, Type:1/signed-integer-unit:8, Data/binary>>
+ end,
+ {ok, Bin};
+ {error, E} ->
+ {error, E}
+ end.
diff --git a/deps/msgpack/src/msgpack_term.erl b/deps/msgpack/src/msgpack_term.erl
new file mode 100644
index 0000000..0e0cead
--- /dev/null
+++ b/deps/msgpack/src/msgpack_term.erl
@@ -0,0 +1,82 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+-module(msgpack_term).
+
+-export([to_binary/1, from_binary/2,
+ pack_ext/2, unpack_ext/3]).
+-behaviour(msgpack_ext).
+
+-define(ERLANG_TERM, 127).
+-define(TERM_OPTION, [{enable_str,true},{ext,?MODULE},{allow_atom,none}]).
+
+%% @doc experimental
+-spec to_binary(term()) -> binary().
+to_binary(Term) ->
+ msgpack:pack(Term, ?TERM_OPTION).
+
+%% @doc experimental
+-spec from_binary(binary(), []|[safe]) -> term().
+from_binary(Bin, Opt) ->
+ case msgpack:unpack(Bin, Opt ++ ?TERM_OPTION) of
+ {ok, Term} -> Term;
+ Error -> error(Error)
+ end.
+
+-spec pack_ext(tuple(), msgpack:options()) ->
+ {ok, {Type::byte(), Data::binary()}} |
+ {error, any()}.
+pack_ext(Term, _Options) ->
+ %% There is still much room for improvement:
+ %% for example, pid() could be compressed much
+ %% further by using msgpack integers.
+ %% The reference type is also bigger, because
+ %% it encodes four integers at four bytes each.
+ %% Both types include the node name, which is
+ %% an atom carrying a two-byte length prefix.
+ %% Usually an atom or string shorter than 32 characters
+ %% could be encoded with a single byte indicating its length.
+ {ok, {?ERLANG_TERM, erlang:term_to_binary(Term)}}.
+
+-spec unpack_ext(Type::byte(), Data::binary(), msgpack:options()) ->
+ {ok, any()} | {error, any()}.
+unpack_ext(?ERLANG_TERM, Bin, Opt) ->
+ case proplists:get_value(safe, Opt) of
+ true ->
+ {ok, erlang:binary_to_term(Bin, [safe])};
+ undefined ->
+ {ok, erlang:binary_to_term(Bin)}
+ end.
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+test_data() ->
+ ['foobar atom', %% is_atom/1
+ fun() -> ok end, %% is_function/1
+ self(), %% is_pid/1
+ %% is_port/1
+ make_ref(), %% is_reference/1
+ {me, foo, bar}, %% is_tuple/1
+ {}].
+
+t2b_b2t_test() ->
+ Data = test_data(),
+ ?assertEqual(Data,
+ msgpack:binary_to_term(msgpack:term_to_binary(Data))).
+
+-endif.
diff --git a/deps/msgpack/src/msgpack_unpacker.erl b/deps/msgpack/src/msgpack_unpacker.erl
new file mode 100644
index 0000000..181a2ab
--- /dev/null
+++ b/deps/msgpack/src/msgpack_unpacker.erl
@@ -0,0 +1,257 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+-module(msgpack_unpacker).
+
+-export([unpack_stream/2, map_unpacker/1]).
+
+-include("msgpack.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+
+-ifndef(without_map).
+-export([unpack_map/3]).
+-endif.
+
+-export([unpack_map_jiffy/3, unpack_map_jsx/3]).
+
+%% unpack them all
+-spec unpack_stream(Bin::binary(), msgpack_option()) -> {msgpack:object(), binary()} | no_return().
+%% ATOMS
+unpack_stream(<<16#C0, Rest/binary>>, _Opt = ?OPTION{interface=jsx}) ->
+ {null, Rest};
+unpack_stream(<<16#C0, Rest/binary>>, _Opt = ?OPTION{interface=jiffy}) ->
+ {null, Rest};
+unpack_stream(<<16#C0, Rest/binary>>, _) ->
+ {nil, Rest};
+unpack_stream(<<16#C2, Rest/binary>>, _) ->
+ {false, Rest};
+unpack_stream(<<16#C3, Rest/binary>>, _) ->
+ {true, Rest};
+
+%% Raw bytes
+unpack_stream(<<16#C4, L:8/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#C5, L:16/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#C6, L:32/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>, _) ->
+ {V, Rest};
+
+%% Floats
+unpack_stream(<<16#CA, V:32/float-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#CB, V:64/float-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+
+%% Unsigned integers
+unpack_stream(<<16#CC, V:8/unsigned-integer, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#CD, V:16/big-unsigned-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#CE, V:32/big-unsigned-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#CF, V:64/big-unsigned-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+
+%% Signed integers
+unpack_stream(<<16#D0, V:8/signed-integer, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#D1, V:16/big-signed-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#D2, V:32/big-signed-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+unpack_stream(<<16#D3, V:64/big-signed-integer-unit:1, Rest/binary>>, _) ->
+ {V, Rest};
+
+%% Strings as new spec, or Raw bytes as old spec
+unpack_stream(<<2#101:3, L:5, V:L/binary, Rest/binary>>, Opt) ->
+ unpack_string_or_raw(V, Opt, Rest);
+
+unpack_stream(<<16#D9, L:8/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>,
+ ?OPTION{enable_str=true} = _Opt) ->
+ {unpack_string(V), Rest};
+
+unpack_stream(<<16#DA, L:16/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>, Opt) ->
+ unpack_string_or_raw(V, Opt, Rest);
+
+unpack_stream(<<16#DB, L:32/big-unsigned-integer-unit:1, V:L/binary, Rest/binary>>, Opt) ->
+ unpack_string_or_raw(V, Opt, Rest);
+
+%% Arrays
+unpack_stream(<<2#1001:4, L:4, Rest/binary>>, Opt) ->
+ unpack_array(Rest, L, [], Opt);
+unpack_stream(<<16#DC, L:16/big-unsigned-integer-unit:1, Rest/binary>>, Opt) ->
+ unpack_array(Rest, L, [], Opt);
+unpack_stream(<<16#DD, L:32/big-unsigned-integer-unit:1, Rest/binary>>, Opt) ->
+ unpack_array(Rest, L, [], Opt);
+
+%% Maps
+unpack_stream(<<2#1000:4, L:4, Rest/binary>>, Opt) ->
+ Unpacker = Opt?OPTION.map_unpack_fun,
+ Unpacker(Rest, L, Opt);
+unpack_stream(<<16#DE, L:16/big-unsigned-integer-unit:1, Rest/binary>>, Opt) ->
+ Unpacker = Opt?OPTION.map_unpack_fun,
+ Unpacker(Rest, L, Opt);
+
+unpack_stream(<<16#DF, L:32/big-unsigned-integer-unit:1, Rest/binary>>, Opt) ->
+ Unpacker = Opt?OPTION.map_unpack_fun,
+ Unpacker(Rest, L, Opt);
+
+%% Tag-encoded lengths (kept last, for speed)
+%% positive int
+unpack_stream(<<0:1, V:7, Rest/binary>>, _) -> {V, Rest};
+
+%% negative int
+unpack_stream(<<2#111:3, V:5, Rest/binary>>, _) -> {V - 2#100000, Rest};
+
+
+%% Invalid data
+unpack_stream(<<16#C1, _R/binary>>, _) -> throw({badarg, 16#C1});
+
+%% for extension types
+
+%% fixext 1 stores an integer and a byte array whose length is 1 byte
+unpack_stream(<<16#D4, T:1/signed-integer-unit:8, Data:1/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#D4, Unpack, T, Data, Rest, Orig);
+
+%% fixext 2 stores an integer and a byte array whose length is 2 bytes
+unpack_stream(<<16#D5, T:1/signed-integer-unit:8, Data:2/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#D5, Unpack, T, Data, Rest, Orig);
+
+%% fixext 4 stores an integer and a byte array whose length is 4 bytes
+unpack_stream(<<16#D6, T:1/signed-integer-unit:8, Data:4/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#D6, Unpack, T, Data, Rest, Orig);
+
+%% fixext 8 stores an integer and a byte array whose length is 8 bytes
+unpack_stream(<<16#D7, T:1/signed-integer-unit:8, Data:8/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#D7, Unpack, T, Data, Rest, Orig);
+
+%% fixext 16 stores an integer and a byte array whose length is 16 bytes
+unpack_stream(<<16#D8, T:1/signed-integer-unit:8, Data:16/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#D8, Unpack, T, Data, Rest, Orig);
+
+%% ext 8 stores an integer and a byte array whose length is up to (2^8)-1 bytes:
+unpack_stream(<<16#C7, Len:8, Type:1/signed-integer-unit:8, Data:Len/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#C7, Unpack, Type, Data, Rest, Orig);
+
+%% ext 16 stores an integer and a byte array whose length is up to (2^16)-1 bytes:
+unpack_stream(<<16#C8, Len:16, Type:1/signed-integer-unit:8, Data:Len/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#C8, Unpack, Type, Data, Rest, Orig);
+
+%% ext 32 stores an integer and a byte array whose length is up to (2^32)-1 bytes:
+unpack_stream(<<16#C9, Len:32, Type:1/signed-integer-unit:8, Data:Len/binary, Rest/binary>>,
+ ?OPTION{ext_unpacker=Unpack, original_list=Orig} = _Opt) ->
+ maybe_unpack_ext(16#C9, Unpack, Type, Data, Rest, Orig);
+
+unpack_stream(_Bin, _) -> throw(incomplete).
+
+-spec unpack_array(binary(), non_neg_integer(), [msgpack:object()], msgpack_option()) ->
+ {[msgpack:object()], binary()} | no_return().
+unpack_array(Bin, 0, Acc, _) ->
+ {lists:reverse(Acc), Bin};
+unpack_array(Bin, Len, Acc, Opt) ->
+ {Term, Rest} = unpack_stream(Bin, Opt),
+ unpack_array(Rest, Len-1, [Term|Acc], Opt).
+
+-ifdef(without_map).
+map_unpacker(jiffy) ->
+ fun ?MODULE:unpack_map_jiffy/3;
+map_unpacker(jsx) ->
+ fun ?MODULE:unpack_map_jsx/3.
+-else.
+map_unpacker(map) ->
+ fun ?MODULE:unpack_map/3;
+map_unpacker(jiffy) ->
+ fun ?MODULE:unpack_map_jiffy/3;
+map_unpacker(jsx) ->
+ fun ?MODULE:unpack_map_jsx/3.
+-endif.
+
+
+
+-ifndef(without_map).
+-spec unpack_map(binary(), non_neg_integer(), msgpack_option()) ->
+ {map(), binary()} | no_return().
+unpack_map(Bin, Len, Opt) ->
+ {Map, Rest} = unpack_map_as_proplist(Bin, Len, [], Opt),
+ {maps:from_list(Map), Rest}.
+%% unpack_map(Bin, Len, #{}, Opt).
+
+%% unpack_map(Bin, Len, Acc, _) -> {Acc, Bin};
+%% unpack_map(Bin, Len, Acc, Opt) ->
+%% {Key, Rest} = unpack_stream(Bin, Opt),
+%% {Value, Rest2} = unpack_stream(Rest, Opt),
+%% unpack_map(Rest2, Len-1, maps:put(Key, Value, Acc), Opt).
+-endif.
+
+%% Users SHOULD NOT send overly long lists: this uses lists:reverse/1
+-spec unpack_map_jiffy(binary(), non_neg_integer(), msgpack_option()) ->
+ {msgpack:msgpack_map_jiffy(), binary()} | no_return().
+unpack_map_jiffy(Bin, Len, Opt) ->
+ {Map, Rest} = unpack_map_as_proplist(Bin, Len, [], Opt),
+ {{Map}, Rest}.
+
+-spec unpack_map_jsx(binary(), non_neg_integer(), msgpack_option()) ->
+ {msgpack:msgpack_map_jsx(), binary()} | no_return().
+unpack_map_jsx(Bin, Len, Opt) ->
+ case unpack_map_as_proplist(Bin, Len, [], Opt) of
+ {[], Rest} -> {[{}], Rest};
+ {Map, Rest} -> {Map, Rest}
+ end.
+
+-spec unpack_map_as_proplist(binary(), non_neg_integer(), proplists:proplist(), msgpack_option()) ->
+ {proplists:proplist(), binary()} | no_return().
+unpack_map_as_proplist(Bin, 0, Acc, _) ->
+ {lists:reverse(Acc), Bin};
+unpack_map_as_proplist(Bin, Len, Acc, Opt) ->
+ {Key, Rest} = unpack_stream(Bin, Opt),
+ {Value, Rest2} = unpack_stream(Rest, Opt),
+ unpack_map_as_proplist(Rest2, Len-1, [{Key,Value}|Acc], Opt).
+
+unpack_string_or_raw(V, ?OPTION{enable_str=true} = _Opt, Rest) ->
+ {unpack_string(V), Rest};
+unpack_string_or_raw(V, ?OPTION{enable_str=false} = _Opt, Rest) ->
+ {V, Rest}.
+
+%% NOTE: msgpack DOES validate the binary as valid unicode string.
+unpack_string(Binary) ->
+ case unicode:characters_to_list(Binary) of
+ {error, _S, _Rest} -> throw({error, {invalid_string, Binary}});
+ {incomplete, _S, _Rest} -> throw({error, {invalid_string, Binary}});
+ String -> String
+ end.
+
+maybe_unpack_ext(F, undefined, _, _, _Rest, _) -> throw({badarg, {bad_ext, F}});
+maybe_unpack_ext(_, Unpack, Type, Data, Rest, Orig) when is_function(Unpack, 3) ->
+ case Unpack(Type, Data, Orig) of
+ {ok, Term} -> {Term, Rest};
+ {error, E} -> {error, E}
+ end;
+maybe_unpack_ext(_, Unpack, Type, Data, Rest, _) when is_function(Unpack, 2) ->
+ case Unpack(Type, Data) of
+ {ok, Term} -> {Term, Rest};
+ {error, E} -> {error, E}
+ end.
+
diff --git a/deps/msgpack/test/msgpack_ext_example_tests.erl b/deps/msgpack/test/msgpack_ext_example_tests.erl
new file mode 100644
index 0000000..7fa2ad0
--- /dev/null
+++ b/deps/msgpack/test/msgpack_ext_example_tests.erl
@@ -0,0 +1,102 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+-module(msgpack_ext_example_tests).
+
+-compile(export_all).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-behaviour(msgpack_ext).
+
+ext_test() ->
+ Packer = fun({foobar, Me}, _) ->
+ {ok, {12, term_to_binary(Me)}}
+ end,
+ Unpacker = fun(12, Bin) ->
+ {ok, {foobar, binary_to_term(Bin)}}
+ end,
+ Ref = make_ref(),
+ Opt = [{ext,{Packer,Unpacker}}],
+ Bin = msgpack:pack({foobar, Ref}, Opt),
+ {ok, {foobar, Ref}} = msgpack:unpack(Bin, Opt).
+
+uuid_example_test() ->
+ Packer = fun({uuid, UUID}, _) when is_binary(UUID) ->
+ {ok, {42, UUID}}
+ end,
+ Unpacker = fun(42, Bin0) ->
+ {ok, {uuid, Bin0}}
+ end,
+ UUID0 = {uuid, <<221,85,73,226,102,90,82,118,40,26,166,74,52,42,61,207>>},
+ Opt = [{ext,{Packer,Unpacker}}],
+ Bin = msgpack:pack(UUID0, Opt),
+ {ok, UUID0} = msgpack:unpack(Bin, Opt).
+
+pack_native({native, Term}, _) when is_pid(Term) orelse
+ is_reference(Term) orelse
+ is_port(Term) orelse
+ is_tuple(Term) orelse
+ is_function(Term) ->
+ {ok, {42, term_to_binary(Term)}}.
+
+unpack_native(42, Bin) ->
+ {ok, {native, binary_to_term(Bin)}}.
+
+native_test() ->
+ Opt = [{ext, {fun pack_native/2, fun unpack_native/2}}],
+ Term = {native, {self(), make_ref(), foobar, fun() -> ok end}},
+ {ok, Term} = msgpack:unpack(msgpack:pack(Term, Opt), Opt).
+
+pack_ext(T, O) -> pack_native(T, O).
+unpack_ext(I, B, _) -> unpack_native(I, B).
+
+behaviour_test() ->
+ Opt = [{ext, ?MODULE}],
+ Term = {native, {self(), make_ref(), foobar, fun() -> ok end}},
+ {ok, Term} = msgpack:unpack(msgpack:pack(Term, Opt), Opt).
+
+
+ext_typecode_range_test() ->
+    %% Typecode ranges from the msgpack spec: [-128,-1] is the "reserved"
+    %% range, [0,127] is the "user-defined" range.
+ TypecodeMin = -128,
+ TypecodeMax = 127,
+ Packer = fun ({thing, N}, _) ->
+ {ok, {N, msgpack:pack(N)}}
+ end,
+ Unpacker = fun(N, Bin, _) ->
+ Result = msgpack:unpack(Bin),
+ ?assertEqual({ok, N}, Result),
+ Result
+ end,
+ Opt = [{ext,{Packer,Unpacker}}],
+ %% it should be possible to use an uncontroversial ext type code:
+ Enc = msgpack:pack({thing,1}, Opt),
+ ?assertMatch({ok, 1}, msgpack:unpack(Enc, Opt)),
+ %% it should be possible to use ext typecodes covering the entire
+ %% range specified in the msgpack specification:
+ [begin
+ Encoded = msgpack:pack({thing, N}, Opt),
+ Result = msgpack:unpack(Encoded, Opt),
+ ?assertMatch({ok, N}, Result)
+ end || N <- lists:seq(TypecodeMin,TypecodeMax)],
+ %% using codes outside the allowed range should fail:
+ [?assertError({case_clause, _}, msgpack:pack({thing, N}, Opt))
+ || N <- [-129, 128]],
+ ok.
diff --git a/deps/msgpack/test/msgpack_nif_tests.erl b/deps/msgpack/test/msgpack_nif_tests.erl
new file mode 100644
index 0000000..5f2dd91
--- /dev/null
+++ b/deps/msgpack/test/msgpack_nif_tests.erl
@@ -0,0 +1,140 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+
+-module(msgpack_nif_tests).
+
+-undef(NIF).
+-ifdef(NIF).
+
+-import(msgpack_nif, [pack/1, unpack/1]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+msgpack_props_test_() ->
+ {timeout,10000, ?_assertEqual([],proper:module(msgpack_props))}.
+
+unpack_test_() ->
+ [
+ {"not binary",
+ ?_assertEqual({error, {badarg, []}}, unpack([]))},
+
+ {"incomplete: null binary",
+ ?_assertEqual({error, incomplete}, unpack(<<>>))},
+
+ {"incomplete: unknown binary",
+ ?_assertEqual({error, incomplete}, unpack(<<16#DA>>))}
+ ].
+
+array_test_()->
+ [
+ {"length 16",
+ fun() ->
+ List = lists:seq(0, 16),
+ Binary = pack(List),
+ ?assertEqual({ok, List}, unpack(Binary))
+ end},
+ {"length 32",
+ fun() ->
+ List = lists:seq(0, 16#010000),
+ Binary = pack(List),
+ ?assertEqual({ok, List}, unpack(Binary))
+ end},
+ {"empty",
+ fun() ->
+ EmptyList = [],
+ Binary = pack(EmptyList),
+ ?assertEqual({ok, EmptyList}, unpack(Binary))
+ end}
+ ].
+
+
+map_test_()->
+ [
+ {"length 16",
+ fun() ->
+ Map = {[ {X, X * 2} || X <- lists:seq(0, 16) ]},
+ Binary = pack(Map),
+ ?assertEqual({ok, Map}, unpack(Binary))
+ end},
+ {"length 32",
+ fun() ->
+ Map = {[ {X, X * 2} || X <- lists:seq(0, 16#010000) ]},
+ Binary = pack(Map),
+ ?assertEqual({ok, Map}, unpack(Binary))
+ end},
+ {"empty",
+ fun() ->
+ EmptyMap = {[]},
+ Binary = pack(EmptyMap),
+ ?assertEqual({ok, EmptyMap}, unpack(Binary))
+ end}
+ ].
+
+int_test_() ->
+ [
+ {"",
+ fun() ->
+ Term = -2147483649,
+ Binary = pack(Term),
+ ?assertEqual({ok, Term}, unpack(Binary))
+ end}
+ ].
+
+error_test_()->
+ [
+ {"badarg atom",
+ ?_assertEqual({error, {badarg, atom}},
+ pack(atom))},
+ {"badarg tuple",
+ fun() ->
+ Term = {"hoge", "hage", atom},
+ ?assertEqual({error, {badarg, Term}},
+ pack(Term))
+ end}
+ ].
+
+binary_test_() ->
+ [
+ {"0 byte",
+ fun() ->
+ Binary = pack(<<>>),
+ ?assertEqual({ok, <<>>}, unpack(Binary))
+ end}
+ ].
+
+-endif.
+
+%% long_binary_test_()->
+%% [
+%% {"long binary",
+%% fun() ->
+%% A = pack(1),
+%% B = pack(10),
+%% C = pack(100),
+%% ?assertEqual({[1,10,100], <<>>},
+%% unpack(list_to_binary([A, B, C])))
+%% end}
+%% ].
+
+%% benchmark_test()->
+%% Data = [test_data() || _ <- lists:seq(0, 10000)],
+%% {ok, S} = ?debugTime(" serialize", pack(Data)),
+%% {ok, Data} = ?debugTime("deserialize", unpack(S)),
+%% ?debugFmt("for ~p KB test data.", [byte_size(S) div 1024]),
+%% ok.
+
diff --git a/deps/msgpack/test/msgpack_test.erl b/deps/msgpack/test/msgpack_test.erl
new file mode 100644
index 0000000..41c585c
--- /dev/null
+++ b/deps/msgpack/test/msgpack_test.erl
@@ -0,0 +1,413 @@
+%%
+%% MessagePack for Erlang
+%%
+%% Copyright (C) 2009-2013 UENISHI Kota
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% Created : 26 Apr 2011 by UENISHI Kota <uenishi.kota@lab.ntt.co.jp>
+
+-module(msgpack_test).
+
+-import(msgpack, [pack/2, unpack/2, pack/1, unpack/1]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-include("msgpack.hrl").
+
+-ifdef(DO_MSGPACK_CROSSLANG_TEST).
+
+test_data() ->
+ [true, false, nil,
+ 0, 1, 2, 123, 512, 1230, 678908, 16#FFFFFFFFFF,
+ -1, -23, -512, -1230, -567898, -16#FFFFFFFFFF,
+ 123.123, -234.4355, 1.0e-34, 1.0e64,
+ [23, 234, 0.23],
+ <<"hogehoge">>, <<"243546rf7g68h798j", 0, 23, 255>>,
+ <<"hoasfdafdas][">>,
+ [0,42, <<"sum">>, [1,2]], [1,42, nil, [3]],
+ -234, -40000, -16#10000000, -16#100000000,
+ 42].
+
+test_data_jsx()->
+ test_data() ++ [[{}], {hoge}].
+
+test_data_jiffy()->
+ test_data() ++ [ {[]}, {hoge} ].
+
+test_data_map()->
+ test_data() ++ [ #{}, {hoge} ].
+
+compare_all([], [])-> ok;
+compare_all([], R)-> {toomuchrhs, R};
+compare_all(L, [])-> {toomuchlhs, L};
+compare_all([LH|LTL], [RH|RTL]) ->
+ ?assertEqual(LH, RH),
+ compare_all(LTL, RTL).
+
+port_receive(Port) ->
+ port_receive(Port, <<>>).
+port_receive(Port, Acc) ->
+ receive
+ {Port, {data, Data}} -> port_receive(Port, <<Acc/binary, Data/binary>>);
+ {Port, eof} -> Acc
+ after 1000 -> Acc
+ end.
+
+port_map_test()->
+ Tests = test_data_map(),
+    ?assertEqual({[Tests],<<>>}, msgpack:unpack(msgpack:pack([Tests], [{format,map}]), [{format,map}])).
+
+port_jiffy_test()->
+ Tests = test_data_jiffy(),
+ ?assertEqual({[Tests],<<>>}, msgpack:unpack(msgpack:pack([Tests], [{format,jiffy}]), [{format,jiffy}])),
+
+ % Port = open_port({spawn, "ruby ../test/crosslang.rb"}, [binary, eof]),
+ % true = port_command(Port, msgpack:pack(Tests)),
+ % ?assertEqual({Tests, <<>>}, msgpack:unpack(port_receive(Port))),
+ % port_close(Port).
+ ok.
+
+
+port_jsx_test()->
+ Tests = test_data_jsx(),
+ ?assertEqual({[Tests],<<>>}, msgpack:unpack(msgpack:pack([Tests], [{format,jsx}]), [{format,jsx}])),
+
+ % Port = open_port({spawn, "ruby ../test/crosslang.rb"}, [binary, eof]),
+ % true = port_command(Port, msgpack:pack(Tests)),
+ % ?assertEqual({Tests, <<>>}, msgpack:unpack(port_receive(Port))),
+ % port_close(Port).
+ ok.
+
+unknown_test_freezed_test_dont_do_this()->
+ Port = open_port({spawn, "ruby testcase_generator.rb"}, [binary, eof]),
+ Tests = [0, 1, 2, 123, 512, 1230, 678908,
+ -1, -23, -512, -1230, -567898,
+ <<"hogehoge">>, <<"243546rf7g68h798j">>,
+ 123.123,
+ -234.4355, 1.0e-34, 1.0e64,
+ [23, 234, 0.23],
+ [0,42,<<"sum">>, [1,2]], [1,42, nil, [3]],
+ [{1,2},{<<"hoge">>,nil}], % map
+ -234, -50000,
+ 42
+ ],
+ ?assertEqual(ok, compare_all(Tests, msgpack:unpack_all(port_receive(Port)))),
+ port_close(Port).
+
+-endif.
+
+issue_jsx_5_test() ->
+ %% {'type':"workers", 'data':[{'workerid': "std.1", 'slots':[] }]}
+ Term = [
+ {<<"type">>, <<"workers">>},
+ {<<"data">>,[
+ [{<<"workerid">>, <<"std.1">>}, {<<"slots">>, []}]
+ ]
+ }
+ ],
+ Encoded = msgpack:pack(Term, [{format,jsx}, {enable_str,true}]),
+ Bin0 = <<130,196,4,116,121,112,101,196,7,119,111,114,107,101,114,115,
+ 196,4,100,97,116,97,145,130,196,8,119,111,114,107,101,114,105,100,
+ 196,5,115,116,100,46,49,196,5,115,108,111,116,115,160>>,
+
+ ?assertEqual(Bin0, Encoded),
+
+ {ok, Decoded} = msgpack:unpack(Bin0, [{format,jsx}, {enable_str,true}]),
+ ?assertEqual(Term, Decoded).
+
+
+issue_jiffy_5_test() ->
+ %% {'type':"workers", 'data':[{'workerid': "std.1", 'slots':[] }]}
+ Term = {[
+ {<<"type">>, <<"workers">>},
+ {<<"data">>,[
+ {[{<<"workerid">>, <<"std.1">>},{<<"slots">>, []}]}
+ ]
+ }
+ ]},
+ Encoded = msgpack:pack(Term, [{format,jiffy}, {enable_str,true}]),
+ Bin0 = <<130,196,4,116,121,112,101,196,7,119,111,114,107,101,114,115,
+ 196,4,100,97,116,97,145,130,196,8,119,111,114,107,101,114,105,100,
+ 196,5,115,116,100,46,49,196,5,115,108,111,116,115,160>>,
+ ?assertEqual(Bin0, Encoded),
+
+ {ok, Decoded} = msgpack:unpack(Bin0, [{format,jiffy}, {enable_str,true}]),
+ ?assertEqual(Term, Decoded).
+
+
+issue_27_test_() ->
+ [
+ %% null(jiffy) => nil(msgpack) => null(jsx)
+ ?_assertEqual({ok, null},
+ msgpack:unpack(msgpack:pack(null, [{format,jiffy}]), [{format,jsx}])),
+
+ %% null(jiffy) => nil(msgpack) => null(jiffy)
+ ?_assertEqual({ok, null},
+ msgpack:unpack(msgpack:pack(null, [{format,jiffy}]), [{format,jiffy}])),
+
+
+ %% null(jsx) => nil(msgpack) => null(jiffy)
+ ?_assertEqual({ok, null},
+ msgpack:unpack(msgpack:pack(null, [{format,jsx}]), [{format,jiffy}])),
+
+ %% nil(jiffy-atom) => <<nil>>(msgpack-binary) => <<"nil">>
+ ?_assertEqual({ok, <<"nil">>},
+ msgpack:unpack(msgpack:pack(nil, [{allow_atom,pack}, {format,jiffy}]), [{format,jiffy}])),
+
+ %% nil(jsx-atom) => <<nil>>(msgpack-binary) => <<"nil">>
+ ?_assertEqual({ok, <<"nil">>},
+ msgpack:unpack(msgpack:pack(nil,
+ [{format,jsx},{allow_atom,pack}]), [{format,jiffy}]))].
+
+string_test() ->
+ {ok, CWD} = file:get_cwd(),
+ Path = CWD ++ "/../test/utf8.txt",
+ {ok, UnicodeBin} = file:read_file(Path),
+ String = unicode:characters_to_list(UnicodeBin),
+ MsgpackStringBin = msgpack:pack(String),
+ {ok, String} = msgpack:unpack(MsgpackStringBin).
+
+default_test_() ->
+ [
+ {"pack",
+ fun() ->
+ Map = {[{1,2}]},
+ ?assertEqual(pack(Map, [{format, ?DEFAULT_MAP_FORMAT}]), pack(Map))
+ end},
+ {"unpack",
+ fun() ->
+ Map = {[{1,2}]},
+ Binary = pack(Map, [{format, ?DEFAULT_MAP_FORMAT}]),
+ ?assertEqual(unpack(Binary, [{format, ?DEFAULT_MAP_FORMAT}]), unpack(Binary))
+ end}
+ ].
+
+unpack_test_() ->
+ [
+ {"not binary",
+ ?_assertEqual({error, {badarg, []}}, unpack([]))},
+
+ {"incomplete: null binary",
+ ?_assertEqual({error, incomplete}, unpack(<<>>))},
+
+ {"incomplete: unknown binary",
+ ?_assertEqual({error, incomplete}, unpack(<<16#DA>>))}
+ ].
+
+array_test_()->
+ [
+ {"length 16",
+ fun() ->
+ List = lists:seq(0, 16),
+ Binary = pack(List),
+ ?assertEqual({ok, List}, unpack(Binary))
+ end},
+ {"length 32",
+ fun() ->
+ List = lists:seq(0, 16#010000),
+ Binary = pack(List),
+ ?assertEqual({ok, List}, unpack(Binary))
+ end},
+ {"empty",
+ fun() ->
+ EmptyList = [],
+ Binary = pack(EmptyList),
+ ?assertEqual({ok, EmptyList}, unpack(Binary))
+ end}
+ ].
+
+-ifndef(without_map).
+map_test_()->
+ [
+ {"maps <=> jsx",
+ fun() ->
+ JSXMap = [ {X, X * 2} || X <- lists:seq(0, 16) ],
+ BinaryJSX = pack(JSXMap, [{format,jsx}]),
+ Map = maps:from_list(JSXMap),
+ Binary = pack(Map, [{format,map}]),
+ ?assertEqual(BinaryJSX, Binary)
+ end},
+
+ {"pack map without {format,map}",
+ fun() ->
+ Map = maps:from_list([ {X, X * 2} || X <- lists:seq(0, 16) ]),
+ Binary = pack(Map),
+ ?assertEqual({ok,Map}, unpack(Binary, [{format,map}]))
+ end},
+
+ {"map length 16",
+ fun() ->
+ Map = maps:from_list([ {X, X * 2} || X <- lists:seq(0, 16) ]),
+ Binary = pack(Map, [{format,map}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,map}]))
+ end},
+ {"map length 32",
+ fun() ->
+ Map = maps:from_list([ {X, X * 2} || X <- lists:seq(0, 16#010000) ]),
+ Binary = pack(Map, [{format,map}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,map}]))
+ end},
+ {"map empty",
+ fun() ->
+ EmptyMap = maps:new(),
+ Binary = pack(EmptyMap, [{format,map}]),
+ ?assertEqual({ok, EmptyMap}, unpack(Binary, [{format,map}]))
+ end}].
+-endif.
+
+
+jiffy_jsx_test_() ->
+ [{"jiffy length 16",
+ fun() ->
+ Map = {[ {X, X * 2} || X <- lists:seq(0, 16) ]},
+ Binary = pack(Map, [{format,jiffy}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,jiffy}]))
+ end},
+ {"jiffy length 32",
+ fun() ->
+ Map = {[ {X, X * 2} || X <- lists:seq(0, 16#010000) ]},
+ Binary = pack(Map, [{format,jiffy}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,jiffy}]))
+ end},
+ {"jiffy empty",
+ fun() ->
+ EmptyMap = {[]},
+ Binary = pack(EmptyMap, [{format,jiffy}]),
+ ?assertEqual({ok, EmptyMap}, unpack(Binary, [{format,jiffy}]))
+ end},
+ {"jsx length 16",
+ fun() ->
+ Map = [ {X, X * 2} || X <- lists:seq(0, 16) ],
+ Binary = pack(Map, [{format,jsx}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,jsx}]))
+ end},
+ {"jsx length 32",
+ fun() ->
+ Map = [ {X, X * 2} || X <- lists:seq(0, 16#010000) ],
+ Binary = pack(Map, [{format,jsx}]),
+ ?assertEqual({ok, Map}, unpack(Binary, [{format,jsx}]))
+ end},
+ {"jsx empty",
+ fun() ->
+ EmptyMap = [{}],
+ Binary = pack(EmptyMap, [{format,jsx}]),
+ ?assertEqual({ok, EmptyMap}, unpack(Binary, [{format,jsx}]))
+ end}
+ ].
+
+int_test_() ->
+ [
+ {"negative fixnum",
+ fun() ->
+ Term = -32,
+ Binary = pack(Term),
+ ?assertEqual(1, byte_size(Binary)),
+ ?assertEqual({ok, Term}, unpack(Binary))
+ end},
+ {"int 8",
+ fun() ->
+ Term = -33,
+ Binary = pack(Term),
+ ?assertEqual(2, byte_size(Binary)),
+ ?assertEqual({ok, Term}, unpack(Binary)),
+
+ Term2 = -128,
+ Binary2 = pack(Term2),
+ ?assertEqual(2, byte_size(Binary2)),
+ ?assertEqual({ok, Term2}, unpack(Binary2))
+ end},
+ {"int 16",
+ fun() ->
+ Term = -129,
+ Binary = pack(Term),
+ ?assertEqual(3, byte_size(Binary)),
+ ?assertEqual({ok, Term}, unpack(Binary)),
+
+ Term2 = -16#8000,
+ Binary2 = pack(Term2),
+ ?assertEqual(3, byte_size(Binary2)),
+ ?assertEqual({ok, Term2}, unpack(Binary2))
+ end},
+ {"int 32",
+ fun() ->
+ Term = -16#8001,
+ Binary = pack(Term),
+ ?assertEqual(5, byte_size(Binary)),
+ ?assertEqual({ok, Term}, unpack(Binary)),
+
+ Term2 = -16#80000000,
+ Binary2 = pack(Term2),
+ ?assertEqual(5, byte_size(Binary2)),
+ ?assertEqual({ok, Term2}, unpack(Binary2))
+ end},
+ {"int 64",
+ fun() ->
+ Term = -16#80000001,
+ Binary = pack(Term),
+ ?assertEqual(9, byte_size(Binary)),
+ ?assertEqual({ok, Term}, unpack(Binary)),
+
+ Term2 = -16#8000000000000000,
+ Binary2 = pack(Term2),
+ ?assertEqual(9, byte_size(Binary2)),
+ ?assertEqual({ok, Term2}, unpack(Binary2))
+ end}
+ ].
+
+error_test_()->
+ [
+ {"badarg atom",
+ ?_assertEqual({error, {badarg, atom}},
+ pack(atom))},
+ {"badarg tuple",
+ fun() ->
+ Term = {"hoge", "hage", atom},
+ ?assertEqual({error, {badarg, Term}},
+ pack(Term))
+ end},
+ {"badarg too big int",
+ ?_assertEqual({error, {badarg, -16#8000000000000001}},
+ pack(-16#8000000000000001))},
+ {"badarg too big uint",
+ ?_assertEqual({error, {badarg, 16#10000000000000000}},
+ pack(16#10000000000000000))}
+ ].
+
+binary_test_() ->
+ [
+ {"0 byte",
+ fun() ->
+ Binary = pack(<<>>),
+ ?assertEqual({ok, <<>>}, unpack(Binary))
+ end}
+ ].
+
+%% long_binary_test_()->
+%% [
+%% {"long binary",
+%% fun() ->
+%% A = pack(1),
+%% B = pack(10),
+%% C = pack(100),
+%% ?assertEqual({[1,10,100], <<>>},
+%% unpack(list_to_binary([A, B, C])))
+%% end}
+%% ].
+
+%% benchmark_test()->
+%% Data = [test_data() || _ <- lists:seq(0, 10000)],
+%% {ok, S} = ?debugTime(" serialize", pack(Data)),
+%% {ok, Data} = ?debugTime("deserialize", unpack(S)),
+%% ?debugFmt("for ~p KB test data.", [byte_size(S) div 1024]),
+%% ok.
diff --git a/deps/msgpack/test/utf8.txt b/deps/msgpack/test/utf8.txt
new file mode 100644
index 0000000..c5a5dd4
--- /dev/null
+++ b/deps/msgpack/test/utf8.txt
@@ -0,0 +1 @@
+東京埼玉
diff --git a/deps/setup/.gitignore b/deps/setup/.gitignore
new file mode 100644
index 0000000..dea5591
--- /dev/null
+++ b/deps/setup/.gitignore
@@ -0,0 +1,10 @@
+deps
+*.beam
+ebin/*.app
+setup_gen
+xtest/releases
+xtest/testapp-*/ebin
+setup.plt
+.rebar/
+xtest/*/.rebar/
+erl_crash.dump