summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Inada Naoki <methane@users.noreply.github.com>  2019-01-24 18:46:39 +0900
committer GitHub <noreply@github.com>                     2019-01-24 18:46:39 +0900
commit    28b5f46a34933cc177aca333203d1344b5e3639a (patch)
tree      72202599bf4a6610675cf134757124eea6261c93
parent    f46523b1af7ff2d408da8500ea36a4f9f2abe915 (diff)
download  msgpack-python-28b5f46a34933cc177aca333203d1344b5e3639a.tar.gz
Auto limit configuration (#342)
-rw-r--r--  msgpack/_unpacker.pyx  56
-rw-r--r--  msgpack/fallback.py    39
-rw-r--r--  test/test_limits.py    25
3 files changed, 92 insertions, 28 deletions
diff --git a/msgpack/_unpacker.pyx b/msgpack/_unpacker.pyx
index 4ea0545..38119c0 100644
--- a/msgpack/_unpacker.pyx
+++ b/msgpack/_unpacker.pyx
@@ -145,11 +145,11 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
bint use_list=True, bint raw=True, bint strict_map_key=False,
encoding=None, unicode_errors=None,
object_pairs_hook=None, ext_hook=ExtType,
- Py_ssize_t max_str_len=1024*1024,
- Py_ssize_t max_bin_len=1024*1024,
- Py_ssize_t max_array_len=128*1024,
- Py_ssize_t max_map_len=32*1024,
- Py_ssize_t max_ext_len=1024*1024):
+ Py_ssize_t max_str_len=-1,
+ Py_ssize_t max_bin_len=-1,
+ Py_ssize_t max_array_len=-1,
+ Py_ssize_t max_map_len=-1,
+ Py_ssize_t max_ext_len=-1):
"""
Unpack packed_bytes to object. Returns an unpacked object.
@@ -160,6 +160,8 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
Other exceptions can be raised during unpacking.
See :class:`Unpacker` for options.
+
+ *max_xxx_len* options are configured automatically from ``len(packed)``.
"""
cdef unpack_context ctx
cdef Py_ssize_t off = 0
@@ -180,6 +182,18 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
cerr = unicode_errors
get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol)
+
+ if max_str_len == -1:
+ max_str_len = buf_len
+ if max_bin_len == -1:
+ max_bin_len = buf_len
+ if max_array_len == -1:
+ max_array_len = buf_len
+ if max_map_len == -1:
+ max_map_len = buf_len//2
+ if max_ext_len == -1:
+ max_ext_len = buf_len
+
try:
init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook,
use_list, raw, strict_map_key, cenc, cerr,
@@ -259,19 +273,19 @@ cdef class Unpacker(object):
You should set this parameter when unpacking data from untrusted source.
:param int max_str_len:
- Limits max length of str. (default: 1024*1024)
+ Limits max length of str. (default: max_buffer_size or 1024*1024)
:param int max_bin_len:
- Limits max length of bin. (default: 1024*1024)
+ Limits max length of bin. (default: max_buffer_size or 1024*1024)
:param int max_array_len:
- Limits max length of array. (default: 128*1024)
+ Limits max length of array. (default: max_buffer_size or 128*1024)
:param int max_map_len:
- Limits max length of map. (default: 32*1024)
+ Limits max length of map. (default: max_buffer_size//2 or 32*1024)
:param int max_ext_len:
- Limits max size of ext type. (default: 1024*1024)
+ Limits max size of ext type. (default: max_buffer_size or 1024*1024)
:param str encoding:
Deprecated, use raw instead.
@@ -329,11 +343,11 @@ cdef class Unpacker(object):
object object_hook=None, object object_pairs_hook=None, object list_hook=None,
encoding=None, unicode_errors=None, Py_ssize_t max_buffer_size=0,
object ext_hook=ExtType,
- Py_ssize_t max_str_len=1024*1024,
- Py_ssize_t max_bin_len=1024*1024,
- Py_ssize_t max_array_len=128*1024,
- Py_ssize_t max_map_len=32*1024,
- Py_ssize_t max_ext_len=1024*1024):
+ Py_ssize_t max_str_len=-1,
+ Py_ssize_t max_bin_len=-1,
+ Py_ssize_t max_array_len=-1,
+ Py_ssize_t max_map_len=-1,
+ Py_ssize_t max_ext_len=-1):
cdef const char *cenc=NULL,
cdef const char *cerr=NULL
@@ -347,6 +361,18 @@ cdef class Unpacker(object):
self.file_like_read = file_like.read
if not PyCallable_Check(self.file_like_read):
raise TypeError("`file_like.read` must be a callable.")
+
+ if max_str_len == -1:
+ max_str_len = max_buffer_size or 1024*1024
+ if max_bin_len == -1:
+ max_bin_len = max_buffer_size or 1024*1024
+ if max_array_len == -1:
+ max_array_len = max_buffer_size or 128*1024
+ if max_map_len == -1:
+ max_map_len = max_buffer_size//2 or 32*1024
+ if max_ext_len == -1:
+ max_ext_len = max_buffer_size or 1024*1024
+
if not max_buffer_size:
max_buffer_size = INT_MAX
if read_size > max_buffer_size:
diff --git a/msgpack/fallback.py b/msgpack/fallback.py
index 4567e2d..7524448 100644
--- a/msgpack/fallback.py
+++ b/msgpack/fallback.py
@@ -130,7 +130,7 @@ def unpackb(packed, **kwargs):
See :class:`Unpacker` for options.
"""
- unpacker = Unpacker(None, **kwargs)
+ unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
unpacker.feed(packed)
try:
ret = unpacker._unpack()
@@ -208,19 +208,24 @@ class Unpacker(object):
You should set this parameter when unpacking data from untrusted source.
:param int max_str_len:
- Limits max length of str. (default: 1024*1024)
+ (deprecated) Limits max length of str.
+ (default: max_buffer_size or 1024*1024)
:param int max_bin_len:
- Limits max length of bin. (default: 1024*1024)
+ (deprecated) Limits max length of bin.
+ (default: max_buffer_size or 1024*1024)
:param int max_array_len:
- Limits max length of array. (default: 128*1024)
+ Limits max length of array.
+ (default: max_buffer_size or 128*1024)
:param int max_map_len:
- Limits max length of map. (default: 32*1024)
+ Limits max length of map.
+ (default: max_buffer_size//2 or 32*1024)
:param int max_ext_len:
- Limits max size of ext type. (default: 1024*1024)
+ (deprecated) Limits max size of ext type.
+ (default: max_buffer_size or 1024*1024)
example of streaming deserialize from file-like object::
@@ -250,12 +255,11 @@ class Unpacker(object):
object_hook=None, object_pairs_hook=None, list_hook=None,
encoding=None, unicode_errors=None, max_buffer_size=0,
ext_hook=ExtType,
- max_str_len=1024*1024,
- max_bin_len=1024*1024,
- max_array_len=128*1024,
- max_map_len=32*1024,
- max_ext_len=1024*1024):
-
+ max_str_len=-1,
+ max_bin_len=-1,
+ max_array_len=-1,
+ max_map_len=-1,
+ max_ext_len=-1):
if encoding is not None:
warnings.warn(
"encoding is deprecated, Use raw=False instead.",
@@ -286,6 +290,17 @@ class Unpacker(object):
# state, which _buf_checkpoint records.
self._buf_checkpoint = 0
+ if max_str_len == -1:
+ max_str_len = max_buffer_size or 1024*1024
+ if max_bin_len == -1:
+ max_bin_len = max_buffer_size or 1024*1024
+ if max_array_len == -1:
+ max_array_len = max_buffer_size or 128*1024
+ if max_map_len == -1:
+ max_map_len = max_buffer_size//2 or 32*1024
+ if max_ext_len == -1:
+ max_ext_len = max_buffer_size or 1024*1024
+
self._max_buffer_size = max_buffer_size or 2**31-1
if read_size > self._max_buffer_size:
raise ValueError("read_size must be smaller than max_buffer_size")
diff --git a/test/test_limits.py b/test/test_limits.py
index 74e48c1..8c7606f 100644
--- a/test/test_limits.py
+++ b/test/test_limits.py
@@ -105,7 +105,6 @@ def test_max_ext_len():
unpacker.unpack()
-
# PyPy fails following tests because of constant folding?
# https://bugs.pypy.org/issue1721
#@pytest.mark.skipif(True, reason="Requires very large memory.")
@@ -134,3 +133,27 @@ def test_max_ext_len():
# x.append(0)
# with pytest.raises(ValueError):
# packb(x)
+
+
+# auto max len
+
+def test_auto_max_array_len():
+ packed = b'\xde\x00\x06zz'
+ with pytest.raises(UnpackValueError):
+ unpackb(packed, raw=False)
+
+ unpacker = Unpacker(max_buffer_size=5, raw=False)
+ unpacker.feed(packed)
+ with pytest.raises(UnpackValueError):
+ unpacker.unpack()
+
+def test_auto_max_map_len():
+ # len(packed) == 6 -> max_map_len == 3
+ packed = b'\xde\x00\x04zzz'
+ with pytest.raises(UnpackValueError):
+ unpackb(packed, raw=False)
+
+ unpacker = Unpacker(max_buffer_size=6, raw=False)
+ unpacker.feed(packed)
+ with pytest.raises(UnpackValueError):
+ unpacker.unpack()