author     INADA Naoki <methane@users.noreply.github.com>   2015-01-25 03:34:25 +0900
committer  INADA Naoki <methane@users.noreply.github.com>   2015-01-25 03:34:25 +0900
commit     ec5dff113eaf0b60c546ee997047f87fb5d7e5fc
tree       8069223d23b98bfac8af3563e3204e8306fec1b0
parent     c43fb48724049dc35c34fd389091e384dec46bb8
parent     2985f4d8651982b07e2cfa7037e7a8c3530a127b
download   msgpack-python-ec5dff113eaf0b60c546ee997047f87fb5d7e5fc.tar.gz
Merge pull request #105 from msgpack/max-xxx-size
Add max_<type>_len option to unpacker. (fixes #97).
-rw-r--r--  .travis.yml              7
-rw-r--r--  msgpack/_unpacker.pyx   70
-rw-r--r--  msgpack/fallback.py    127
-rw-r--r--  msgpack/unpack.h        33
-rw-r--r--  test/test_limits.py     73
-rw-r--r--  tox.ini                 29
6 files changed, 286 insertions, 53 deletions
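
Before the per-file diffs, a minimal usage sketch of what this merge enables (not part of the patch): it assumes only the max_* keyword arguments introduced below and the fact that exceeding a limit raises a ValueError (or a subclass of it)::

    from msgpack import packb, unpackb, Unpacker

    packed = packb(list(range(100)))          # array of 100 elements

    # One-shot API: unpackb() gains the same max_* keyword arguments.
    try:
        unpackb(packed, max_array_len=10)
    except ValueError as e:
        print("rejected:", e)

    # Streaming API: the limits are checked as each header is parsed.
    unpacker = Unpacker(max_array_len=10)
    unpacker.feed(packed)
    # unpacker.unpack() raises ValueError here as well.
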
diff --git a/.travis.yml b/.travis.yml
index b9d19c1..dad7e87 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,6 +7,13 @@ language: python
python:
- 2.7
+env:
+ - TOXENV=py26-c,py27-c
+ - TOXENV=py32-c,py33-c,py34-c
+ - TOXENV=py26-pure,py27-pure
+ - TOXENV=py32-pure,py33-pure,py34-pure
+ - TOXENV=pypy-pure,pypy3-pure
+
install:
- pip install wheel tox
- ls -la wheelhouse
diff --git a/msgpack/_unpacker.pyx b/msgpack/_unpacker.pyx
index 16de40f..f5e7d95 100644
--- a/msgpack/_unpacker.pyx
+++ b/msgpack/_unpacker.pyx
@@ -28,6 +28,11 @@ cdef extern from "unpack.h":
PyObject* ext_hook
char *encoding
char *unicode_errors
+ Py_ssize_t max_str_len
+ Py_ssize_t max_bin_len
+ Py_ssize_t max_array_len
+ Py_ssize_t max_map_len
+ Py_ssize_t max_ext_len
ctypedef struct unpack_context:
msgpack_user user
@@ -46,10 +51,18 @@ cdef extern from "unpack.h":
cdef inline init_ctx(unpack_context *ctx,
object object_hook, object object_pairs_hook,
object list_hook, object ext_hook,
- bint use_list, char* encoding, char* unicode_errors):
+ bint use_list, char* encoding, char* unicode_errors,
+ Py_ssize_t max_str_len, Py_ssize_t max_bin_len,
+ Py_ssize_t max_array_len, Py_ssize_t max_map_len,
+ Py_ssize_t max_ext_len):
unpack_init(ctx)
ctx.user.use_list = use_list
ctx.user.object_hook = ctx.user.list_hook = <PyObject*>NULL
+ ctx.user.max_str_len = max_str_len
+ ctx.user.max_bin_len = max_bin_len
+ ctx.user.max_array_len = max_array_len
+ ctx.user.max_map_len = max_map_len
+ ctx.user.max_ext_len = max_ext_len
if object_hook is not None and object_pairs_hook is not None:
raise TypeError("object_pairs_hook and object_hook are mutually exclusive.")
@@ -85,7 +98,12 @@ def default_read_extended_type(typecode, data):
def unpackb(object packed, object object_hook=None, object list_hook=None,
bint use_list=1, encoding=None, unicode_errors="strict",
- object_pairs_hook=None, ext_hook=ExtType):
+ object_pairs_hook=None, ext_hook=ExtType,
+ Py_ssize_t max_str_len=2147483647, # 2**31-1
+ Py_ssize_t max_bin_len=2147483647,
+ Py_ssize_t max_array_len=2147483647,
+ Py_ssize_t max_map_len=2147483647,
+ Py_ssize_t max_ext_len=2147483647):
"""
Unpack packed_bytes to object. Returns an unpacked object.
@@ -115,7 +133,8 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
cerr = PyBytes_AsString(unicode_errors)
init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook,
- use_list, cenc, cerr)
+ use_list, cenc, cerr,
+ max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len)
ret = unpack_construct(&ctx, buf, buf_len, &off)
if ret == 1:
obj = unpack_data(&ctx)
@@ -144,8 +163,7 @@ def unpack(object stream, object object_hook=None, object list_hook=None,
cdef class Unpacker(object):
- """
- Streaming unpacker.
+ """Streaming unpacker.
arguments:
@@ -183,6 +201,19 @@ cdef class Unpacker(object):
Raises `BufferFull` exception when it is insufficient.
You should set this parameter when unpacking data from an untrusted source.
+ :param int max_str_len:
+ Limits max length of str. (default: 2**31-1)
+
+ :param int max_bin_len:
+ Limits max length of bin. (default: 2**31-1)
+
+ :param int max_array_len:
+ Limits max length of array. (default: 2**31-1)
+
+ :param int max_map_len:
+ Limits max length of map. (default: 2**31-1)
+
+ :param int max_ext_len:
+ Limits max length of ext. (default: 2**31-1)
+
example of streaming deserialize from file-like object::
unpacker = Unpacker(file_like)
@@ -220,8 +251,13 @@ cdef class Unpacker(object):
def __init__(self, file_like=None, Py_ssize_t read_size=0, bint use_list=1,
object object_hook=None, object object_pairs_hook=None, object list_hook=None,
- str encoding=None, str unicode_errors='strict', int max_buffer_size=0,
- object ext_hook=ExtType):
+ encoding=None, unicode_errors='strict', int max_buffer_size=0,
+ object ext_hook=ExtType,
+ Py_ssize_t max_str_len=2147483647, # 2**31-1
+ Py_ssize_t max_bin_len=2147483647,
+ Py_ssize_t max_array_len=2147483647,
+ Py_ssize_t max_map_len=2147483647,
+ Py_ssize_t max_ext_len=2147483647):
cdef char *cenc=NULL,
cdef char *cerr=NULL
@@ -253,19 +289,25 @@ cdef class Unpacker(object):
if encoding is not None:
if isinstance(encoding, unicode):
self.encoding = encoding.encode('ascii')
- else:
+ elif isinstance(encoding, bytes):
self.encoding = encoding
+ else:
+ raise TypeError("encoding should be bytes or unicode")
cenc = PyBytes_AsString(self.encoding)
if unicode_errors is not None:
if isinstance(unicode_errors, unicode):
self.unicode_errors = unicode_errors.encode('ascii')
- else:
+ elif isinstance(unicode_errors, bytes):
self.unicode_errors = unicode_errors
+ else:
+ raise TypeError("unicode_errors should be bytes or unicode")
cerr = PyBytes_AsString(self.unicode_errors)
init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook,
- ext_hook, use_list, cenc, cerr)
+ ext_hook, use_list, cenc, cerr,
+ max_str_len, max_bin_len, max_array_len,
+ max_map_len, max_ext_len)
def feed(self, object next_bytes):
"""Append `next_bytes` to internal buffer."""
@@ -365,7 +407,7 @@ cdef class Unpacker(object):
raise ValueError("Unpack failed: error = %d" % (ret,))
def read_bytes(self, Py_ssize_t nbytes):
- """read a specified number of raw bytes from the stream"""
+ """Read a specified number of raw bytes from the stream"""
cdef size_t nread
nread = min(self.buf_tail - self.buf_head, nbytes)
ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread)
@@ -375,8 +417,7 @@ cdef class Unpacker(object):
return ret
def unpack(self, object write_bytes=None):
- """
- unpack one object
+ """Unpack one object
If write_bytes is not None, it will be called with parts of the raw
message as it is unpacked.
@@ -386,8 +427,7 @@ cdef class Unpacker(object):
return self._unpack(unpack_construct, write_bytes)
def skip(self, object write_bytes=None):
- """
- read and ignore one object, returning None
+ """Read and ignore one object, returning None
If write_bytes is not None, it will be called with parts of the raw
message as it is unpacked.
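
For orientation, the constructor changes above could be exercised roughly like this; a hedged sketch in which the payload and the chosen limits are made up for illustration::

    from msgpack import Unpacker, packb

    payload = packb(dict((i, i) for i in range(1000)))   # map with 1000 entries

    unpacker = Unpacker(max_map_len=100, max_buffer_size=1024 * 1024)
    unpacker.feed(payload)
    try:
        for obj in unpacker:
            print(obj)
    except ValueError as e:
        print("map too large:", e)

Note also that encoding and unicode_errors now reject anything other than bytes or unicode with a TypeError, per the isinstance checks added above.
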
diff --git a/msgpack/fallback.py b/msgpack/fallback.py
index 71fa7be..d1f39d1 100644
--- a/msgpack/fallback.py
+++ b/msgpack/fallback.py
@@ -102,62 +102,84 @@ def unpackb(packed, **kwargs):
class Unpacker(object):
- """
- Streaming unpacker.
+ """Streaming unpacker.
+
+ arguments:
- `file_like` is a file-like object having a `.read(n)` method.
- When `Unpacker` is initialized with a `file_like`, `.feed()` is not
- usable.
+ :param file_like:
+ File-like object having `.read(n)` method.
+ If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable.
- `read_size` is used for `file_like.read(read_size)`.
+ :param int read_size:
+ Used as `file_like.read(read_size)`. (default: `min(1024**2, max_buffer_size)`)
- If `use_list` is True (default), msgpack lists are deserialized to Python
- lists. Otherwise they are deserialized to tuples.
+ :param bool use_list:
+ If true, unpack msgpack array to Python list.
+ Otherwise, unpack to Python tuple. (default: True)
- `object_hook` is the same as in simplejson. If it is not None, it should
- be callable and Unpacker calls it with a dict argument after deserializing
- a map.
+ :param callable object_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a dict argument after unpacking msgpack map.
+ (See also simplejson)
- `object_pairs_hook` is the same as in simplejson. If it is not None, it
- should be callable and Unpacker calls it with a list of key-value pairs
- after deserializing a map.
+ :param callable object_pairs_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
+ (See also simplejson)
- `ext_hook` is callback for ext (User defined) type. It called with two
- arguments: (code, bytes). default: `msgpack.ExtType`
+ :param str encoding:
+ Encoding used for decoding msgpack raw.
+ If it is None (default), msgpack raw is deserialized to Python bytes.
- `encoding` is the encoding used for decoding msgpack bytes. If it is
- None (default), msgpack bytes are deserialized to Python bytes.
+ :param str unicode_errors:
+ Used for decoding msgpack raw with *encoding*.
+ (default: `'strict'`)
- `unicode_errors` is used for decoding bytes.
+ :param int max_buffer_size:
+ Limits the size of data waiting to be unpacked. 0 means system's INT_MAX (default).
+ Raises `BufferFull` exception when it is insufficient.
+ You should set this parameter when unpacking data from an untrusted source.
- `max_buffer_size` limits the buffer size. 0 means INT_MAX (default).
+ :param int max_str_len:
+ Limits max length of str. (default: 2**31-1)
- Raises `BufferFull` exception when it is unsufficient.
+ :param int max_bin_len:
+ Limits max length of bin. (default: 2**31-1)
- You should set this parameter when unpacking data from an untrustred source.
+ :param int max_array_len:
+ Limits max length of array. (default: 2**31-1)
- example of streaming deserialization from file-like object::
+ :param int max_map_len:
+ Limits max length of map. (default: 2**31-1)
+
+ :param int max_ext_len:
+ Limits max length of ext. (default: 2**31-1)
+
+ example of streaming deserialize from file-like object::
unpacker = Unpacker(file_like)
for o in unpacker:
- do_something(o)
+ process(o)
- example of streaming deserialization from socket::
+ example of streaming deserialize from socket::
unpacker = Unpacker()
- while 1:
- buf = sock.recv(1024*2)
+ while True:
+ buf = sock.recv(1024**2)
if not buf:
break
unpacker.feed(buf)
for o in unpacker:
- do_something(o)
+ process(o)
"""
def __init__(self, file_like=None, read_size=0, use_list=True,
object_hook=None, object_pairs_hook=None, list_hook=None,
encoding=None, unicode_errors='strict', max_buffer_size=0,
- ext_hook=ExtType):
+ ext_hook=ExtType,
+ max_str_len=2147483647, # 2**31-1
+ max_bin_len=2147483647,
+ max_array_len=2147483647,
+ max_map_len=2147483647,
+ max_ext_len=2147483647):
if file_like is None:
self._fb_feeding = True
else:
@@ -185,6 +207,11 @@ class Unpacker(object):
self._object_hook = object_hook
self._object_pairs_hook = object_pairs_hook
self._ext_hook = ext_hook
+ self._max_str_len = max_str_len
+ self._max_bin_len = max_bin_len
+ self._max_array_len = max_array_len
+ self._max_map_len = max_map_len
+ self._max_ext_len = max_ext_len
if list_hook is not None and not callable(list_hook):
raise TypeError('`list_hook` is not callable')
@@ -316,12 +343,18 @@ class Unpacker(object):
n = b & 0b00011111
obj = self._fb_read(n, write_bytes)
typ = TYPE_RAW
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
elif b & 0b11110000 == 0b10010000:
n = b & 0b00001111
typ = TYPE_ARRAY
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b & 0b11110000 == 0b10000000:
n = b & 0b00001111
typ = TYPE_MAP
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
elif b == 0xc0:
obj = None
elif b == 0xc2:
@@ -331,26 +364,38 @@ class Unpacker(object):
elif b == 0xc4:
typ = TYPE_BIN
n = struct.unpack("B", self._fb_read(1, write_bytes))[0]
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc5:
typ = TYPE_BIN
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc6:
typ = TYPE_BIN
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc7: # ext 8
typ = TYPE_EXT
L, n = struct.unpack('Bb', self._fb_read(2, write_bytes))
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xc8: # ext 16
typ = TYPE_EXT
L, n = struct.unpack('>Hb', self._fb_read(3, write_bytes))
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xc9: # ext 32
typ = TYPE_EXT
L, n = struct.unpack('>Ib', self._fb_read(5, write_bytes))
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xca:
obj = struct.unpack(">f", self._fb_read(4, write_bytes))[0]
@@ -374,42 +419,66 @@ class Unpacker(object):
obj = struct.unpack(">q", self._fb_read(8, write_bytes))[0]
elif b == 0xd4: # fixext 1
typ = TYPE_EXT
+ if self._max_ext_len < 1:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
n, obj = struct.unpack('b1s', self._fb_read(2, write_bytes))
elif b == 0xd5: # fixext 2
typ = TYPE_EXT
+ if self._max_ext_len < 2:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
n, obj = struct.unpack('b2s', self._fb_read(3, write_bytes))
elif b == 0xd6: # fixext 4
typ = TYPE_EXT
+ if self._max_ext_len < 4:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
n, obj = struct.unpack('b4s', self._fb_read(5, write_bytes))
elif b == 0xd7: # fixext 8
typ = TYPE_EXT
+ if self._max_ext_len < 8:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
n, obj = struct.unpack('b8s', self._fb_read(9, write_bytes))
elif b == 0xd8: # fixext 16
typ = TYPE_EXT
+ if self._max_ext_len < 16:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
n, obj = struct.unpack('b16s', self._fb_read(17, write_bytes))
elif b == 0xd9:
typ = TYPE_RAW
n = struct.unpack("B", self._fb_read(1, write_bytes))[0]
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xda:
typ = TYPE_RAW
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xdb:
typ = TYPE_RAW
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xdc:
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
typ = TYPE_ARRAY
elif b == 0xdd:
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
typ = TYPE_ARRAY
elif b == 0xde:
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
elif b == 0xdf:
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
else:
raise UnpackValueError("Unknown header: 0x%x" % b)
diff --git a/msgpack/unpack.h b/msgpack/unpack.h
index 24045d5..5deb7cd 100644
--- a/msgpack/unpack.h
+++ b/msgpack/unpack.h
@@ -27,6 +27,7 @@ typedef struct unpack_user {
PyObject *ext_hook;
const char *encoding;
const char *unicode_errors;
+ Py_ssize_t max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len;
} unpack_user;
typedef PyObject* msgpack_unpack_object;
@@ -68,7 +69,7 @@ static inline int unpack_callback_uint64(unpack_user* u, uint64_t d, msgpack_unp
if (d > LONG_MAX) {
p = PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)d);
} else {
- p = PyInt_FromLong((long)d);
+ p = PyInt_FromSize_t((size_t)d);
}
if (!p)
return -1;
@@ -132,6 +133,10 @@ static inline int unpack_callback_false(unpack_user* u, msgpack_unpack_object* o
static inline int unpack_callback_array(unpack_user* u, unsigned int n, msgpack_unpack_object* o)
{
+ if (n > u->max_array_len) {
+ PyErr_Format(PyExc_ValueError, "%u exceeds max_array_len(%zd)", n, u->max_array_len);
+ return -1;
+ }
PyObject *p = u->use_list ? PyList_New(n) : PyTuple_New(n);
if (!p)
@@ -163,6 +168,10 @@ static inline int unpack_callback_array_end(unpack_user* u, msgpack_unpack_objec
static inline int unpack_callback_map(unpack_user* u, unsigned int n, msgpack_unpack_object* o)
{
+ if (n > u->max_map_len) {
+ PyErr_Format(PyExc_ValueError, "%u exceeds max_map_len(%zd)", n, u->max_map_len);
+ return -1;
+ }
PyObject *p;
if (u->has_pairs_hook) {
p = PyList_New(n); // Or use tuple?
@@ -210,6 +219,11 @@ static inline int unpack_callback_map_end(unpack_user* u, msgpack_unpack_object*
static inline int unpack_callback_raw(unpack_user* u, const char* b, const char* p, unsigned int l, msgpack_unpack_object* o)
{
+ if (l > u->max_str_len) {
+ PyErr_Format(PyExc_ValueError, "%u exceeds max_str_len(%zd)", l, u->max_str_len);
+ return -1;
+ }
+
PyObject *py;
if(u->encoding) {
py = PyUnicode_Decode(p, l, u->encoding, u->unicode_errors);
@@ -224,6 +238,11 @@ static inline int unpack_callback_raw(unpack_user* u, const char* b, const char*
static inline int unpack_callback_bin(unpack_user* u, const char* b, const char* p, unsigned int l, msgpack_unpack_object* o)
{
+ if (l > u->max_bin_len) {
+ PyErr_Format(PyExc_ValueError, "%u exceeds max_bin_len(%zd)", l, u->max_bin_len);
+ return -1;
+ }
+
PyObject *py = PyBytes_FromStringAndSize(p, l);
if (!py)
return -1;
@@ -232,7 +251,7 @@ static inline int unpack_callback_bin(unpack_user* u, const char* b, const char*
}
static inline int unpack_callback_ext(unpack_user* u, const char* base, const char* pos,
- unsigned int lenght, msgpack_unpack_object* o)
+ unsigned int length, msgpack_unpack_object* o)
{
PyObject *py;
int8_t typecode = (int8_t)*pos++;
@@ -240,11 +259,15 @@ static inline int unpack_callback_ext(unpack_user* u, const char* base, const ch
PyErr_SetString(PyExc_AssertionError, "u->ext_hook cannot be NULL");
return -1;
}
- // length also includes the typecode, so the actual data is lenght-1
+ if (length-1 > u->max_ext_len) {
+ PyErr_Format(PyExc_ValueError, "%u exceeds max_ext_len(%zd)", length, u->max_ext_len);
+ return -1;
+ }
+ // length also includes the typecode, so the actual data is length-1
#if PY_MAJOR_VERSION == 2
- py = PyObject_CallFunction(u->ext_hook, "(is#)", typecode, pos, lenght-1);
+ py = PyObject_CallFunction(u->ext_hook, "(is#)", typecode, pos, length-1);
#else
- py = PyObject_CallFunction(u->ext_hook, "(iy#)", typecode, pos, lenght-1);
+ py = PyObject_CallFunction(u->ext_hook, "(iy#)", typecode, pos, length-1);
#endif
if (!py)
return -1;
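
Since the length handed to unpack_callback_ext includes the typecode byte, the guard above compares length-1; in other words max_ext_len bounds the ext payload size, not the payload plus typecode. A hedged illustration through the public API (the 3-byte payload is arbitrary)::

    from msgpack import packb, unpackb, ExtType

    packed = packb(ExtType(42, b"abc"))       # ext header with a 3-byte payload
    print(unpackb(packed, max_ext_len=3))     # ExtType(code=42, data=b'abc')
    try:
        unpackb(packed, max_ext_len=2)
    except ValueError as e:
        print("rejected:", e)
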
diff --git a/test/test_limits.py b/test/test_limits.py
index 1cfa2d6..3c1cf2a 100644
--- a/test/test_limits.py
+++ b/test/test_limits.py
@@ -3,7 +3,7 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest
-from msgpack import packb, unpackb, Packer
+from msgpack import packb, unpackb, Packer, Unpacker, ExtType
def test_integer():
@@ -32,6 +32,77 @@ def test_map_header():
packer.pack_array_header(2**32)
+def test_max_str_len():
+ d = 'x' * 3
+ packed = packb(d)
+
+ unpacker = Unpacker(max_str_len=3, encoding='utf-8')
+ unpacker.feed(packed)
+ assert unpacker.unpack() == d
+
+ unpacker = Unpacker(max_str_len=2, encoding='utf-8')
+ with pytest.raises(ValueError):
+ unpacker.feed(packed)
+ unpacker.unpack()
+
+
+def test_max_bin_len():
+ d = b'x' * 3
+ packed = packb(d, use_bin_type=True)
+
+ unpacker = Unpacker(max_bin_len=3)
+ unpacker.feed(packed)
+ assert unpacker.unpack() == d
+
+ unpacker = Unpacker(max_bin_len=2)
+ with pytest.raises(ValueError):
+ unpacker.feed(packed)
+ unpacker.unpack()
+
+
+def test_max_array_len():
+ d = [1,2,3]
+ packed = packb(d)
+
+ unpacker = Unpacker(max_array_len=3)
+ unpacker.feed(packed)
+ assert unpacker.unpack() == d
+
+ unpacker = Unpacker(max_array_len=2)
+ with pytest.raises(ValueError):
+ unpacker.feed(packed)
+ unpacker.unpack()
+
+
+def test_max_map_len():
+ d = {1: 2, 3: 4, 5: 6}
+ packed = packb(d)
+
+ unpacker = Unpacker(max_map_len=3)
+ unpacker.feed(packed)
+ assert unpacker.unpack() == d
+
+ unpacker = Unpacker(max_map_len=2)
+ with pytest.raises(ValueError):
+ unpacker.feed(packed)
+ unpacker.unpack()
+
+
+def test_max_ext_len():
+ d = ExtType(42, b"abc")
+ packed = packb(d)
+
+ unpacker = Unpacker(max_ext_len=3)
+ unpacker.feed(packed)
+ assert unpacker.unpack() == d
+
+ unpacker = Unpacker(max_ext_len=2)
+ with pytest.raises(ValueError):
+ unpacker.feed(packed)
+ unpacker.unpack()
+
+
+
# PyPy fails following tests because of constant folding?
# https://bugs.pypy.org/issue1721
#@pytest.mark.skipif(True, reason="Requires very large memory.")
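
The tests above all unpack top-level objects; as one more hedged sketch (not part of the test suite), the limits are checked for every header in the stream, so nested containers are bounded as well::

    from msgpack import packb, Unpacker

    packed = packb([{1: 2, 3: 4, 5: 6}])   # list wrapping a 3-entry map

    unpacker = Unpacker(max_map_len=2)
    unpacker.feed(packed)
    # unpacker.unpack() raises ValueError: the nested map exceeds max_map_len.
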
diff --git a/tox.ini b/tox.ini
index 7971dc7..15feb51 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = {py26,py27,py32,py33,py34}-{c,pure},{pypy,pypy3}-pure
+envlist = {py26,py27,py32,py33,py34}-{c,pure},{pypy,pypy3}-pure,py27-x86,py34-x86
[variants:pure]
setenv=
@@ -11,6 +11,29 @@ deps=
changedir=test
commands=
- c: python -c 'from msgpack import _packer, _unpacker'
- c: py.test
+ c,x86: python -c 'from msgpack import _packer, _unpacker'
+ c,x86: py.test
pure: py.test
+
+[testenv:py27-x86]
+basepython=python2.7-x86
+deps=
+ pytest
+
+changedir=test
+commands=
+ python -c 'import sys; print(hex(sys.maxsize))'
+ python -c 'from msgpack import _packer, _unpacker'
+ py.test
+
+[testenv:py34-x86]
+basepython=python3.4-x86
+deps=
+ pytest
+
+changedir=test
+commands=
+ python -c 'import sys; print(hex(sys.maxsize))'
+ python -c 'from msgpack import _packer, _unpacker'
+ py.test
+