-rw-r--r--  msgpack/_packer.pyx    |  27
-rw-r--r--  msgpack/_unpacker.pyx  |  42
-rw-r--r--  msgpack/exceptions.py  |  35
-rw-r--r--  msgpack/fallback.py    |  77
4 files changed, 85 insertions, 96 deletions
diff --git a/msgpack/_packer.pyx b/msgpack/_packer.pyx
index 6a6d917..2643f85 100644
--- a/msgpack/_packer.pyx
+++ b/msgpack/_packer.pyx
@@ -5,7 +5,6 @@ from cpython cimport *
from cpython.version cimport PY_MAJOR_VERSION
from cpython.exc cimport PyErr_WarnEx
-from msgpack.exceptions import PackValueError, PackOverflowError
from msgpack import ExtType
@@ -165,7 +164,7 @@ cdef class Packer(object):
cdef Py_buffer view
if nest_limit < 0:
- raise PackValueError("recursion limit exceeded.")
+ raise ValueError("recursion limit exceeded.")
while True:
if o is None:
@@ -191,7 +190,7 @@ cdef class Packer(object):
default_used = True
continue
else:
- raise PackOverflowError("Integer value out of range")
+ raise OverflowError("Integer value out of range")
elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o):
longval = o
ret = msgpack_pack_long(&self.pk, longval)
@@ -205,7 +204,7 @@ cdef class Packer(object):
elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o):
L = len(o)
if L > ITEM_LIMIT:
- raise PackValueError("%s is too large" % type(o).__name__)
+ raise ValueError("%s is too large" % type(o).__name__)
rawval = o
ret = msgpack_pack_bin(&self.pk, L)
if ret == 0:
@@ -214,12 +213,12 @@ cdef class Packer(object):
if self.encoding == NULL and self.unicode_errors == NULL:
ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT);
if ret == -2:
- raise PackValueError("unicode string is too large")
+ raise ValueError("unicode string is too large")
else:
o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors)
L = len(o)
if L > ITEM_LIMIT:
- raise PackValueError("unicode string is too large")
+ raise ValueError("unicode string is too large")
ret = msgpack_pack_raw(&self.pk, L)
if ret == 0:
rawval = o
@@ -228,7 +227,7 @@ cdef class Packer(object):
d = <dict>o
L = len(d)
if L > ITEM_LIMIT:
- raise PackValueError("dict is too large")
+ raise ValueError("dict is too large")
ret = msgpack_pack_map(&self.pk, L)
if ret == 0:
for k, v in d.iteritems():
@@ -239,7 +238,7 @@ cdef class Packer(object):
elif not strict_types and PyDict_Check(o):
L = len(o)
if L > ITEM_LIMIT:
- raise PackValueError("dict is too large")
+ raise ValueError("dict is too large")
ret = msgpack_pack_map(&self.pk, L)
if ret == 0:
for k, v in o.items():
@@ -253,13 +252,13 @@ cdef class Packer(object):
rawval = o.data
L = len(o.data)
if L > ITEM_LIMIT:
- raise PackValueError("EXT data is too large")
+ raise ValueError("EXT data is too large")
ret = msgpack_pack_ext(&self.pk, longval, L)
ret = msgpack_pack_raw_body(&self.pk, rawval, L)
elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)):
L = len(o)
if L > ITEM_LIMIT:
- raise PackValueError("list is too large")
+ raise ValueError("list is too large")
ret = msgpack_pack_array(&self.pk, L)
if ret == 0:
for v in o:
@@ -267,11 +266,11 @@ cdef class Packer(object):
if ret != 0: break
elif PyMemoryView_Check(o):
if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0:
- raise PackValueError("could not get buffer for memoryview")
+ raise ValueError("could not get buffer for memoryview")
L = view.len
if L > ITEM_LIMIT:
PyBuffer_Release(&view);
- raise PackValueError("memoryview is too large")
+ raise ValueError("memoryview is too large")
ret = msgpack_pack_bin(&self.pk, L)
if ret == 0:
ret = msgpack_pack_raw_body(&self.pk, <char*>view.buf, L)
@@ -304,7 +303,7 @@ cdef class Packer(object):
def pack_array_header(self, long long size):
if size > ITEM_LIMIT:
- raise PackValueError
+ raise ValueError
cdef int ret = msgpack_pack_array(&self.pk, size)
if ret == -1:
raise MemoryError
@@ -317,7 +316,7 @@ cdef class Packer(object):
def pack_map_header(self, long long size):
if size > ITEM_LIMIT:
- raise PackValueError
+ raise ValueError
cdef int ret = msgpack_pack_map(&self.pk, size)
if ret == -1:
raise MemoryError
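
For callers of the C extension packer, the visible effect is that range and size
violations now surface as the builtin OverflowError / ValueError instead of the
msgpack-specific subclasses. A minimal sketch of the new behaviour (illustrative
only; it assumes the public msgpack.Packer API backed by the _packer.pyx code above):

    import msgpack

    packer = msgpack.Packer()
    try:
        packer.pack(2 ** 64)       # one past the largest integer msgpack can encode
    except OverflowError as exc:   # previously msgpack.exceptions.PackOverflowError
        print("out of range:", exc)

Because PackOverflowError is kept as an alias (see the exceptions.py hunk below), an
existing "except PackOverflowError" clause continues to match the same exception.
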
diff --git a/msgpack/_unpacker.pyx b/msgpack/_unpacker.pyx
index 85c404a..2f99019 100644
--- a/msgpack/_unpacker.pyx
+++ b/msgpack/_unpacker.pyx
@@ -35,7 +35,6 @@ ctypedef unsigned long long uint64_t
from msgpack.exceptions import (
BufferFull,
OutOfData,
- UnpackValueError,
ExtraData,
)
from msgpack import ExtType
@@ -208,7 +207,7 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off))
return obj
unpack_clear(&ctx)
- raise UnpackValueError("Unpack failed: error = %d" % (ret,))
+ raise ValueError("Unpack failed: error = %d" % (ret,))
def unpack(object stream, **kwargs):
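
The one-shot unpackb() path above now reports parse failures as a plain ValueError.
A small sketch (illustrative; 0xc1 is the one header byte the msgpack format never
uses, so it reliably triggers the failure branch):

    import msgpack

    try:
        msgpack.unpackb(b"\xc1")
    except ValueError as exc:      # previously msgpack.exceptions.UnpackValueError
        print("unpack failed:", exc)
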
@@ -460,28 +459,25 @@ cdef class Unpacker(object):
else:
raise OutOfData("No more data to unpack.")
- try:
- ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
- self.stream_offset += self.buf_head - prev_head
- if write_bytes is not None:
- write_bytes(PyBytes_FromStringAndSize(self.buf + prev_head, self.buf_head - prev_head))
-
- if ret == 1:
- obj = unpack_data(&self.ctx)
- unpack_init(&self.ctx)
- return obj
- elif ret == 0:
- if self.file_like is not None:
- self.read_from_file()
- continue
- if iter:
- raise StopIteration("No more data to unpack.")
- else:
- raise OutOfData("No more data to unpack.")
+ ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
+ self.stream_offset += self.buf_head - prev_head
+ if write_bytes is not None:
+ write_bytes(PyBytes_FromStringAndSize(self.buf + prev_head, self.buf_head - prev_head))
+
+ if ret == 1:
+ obj = unpack_data(&self.ctx)
+ unpack_init(&self.ctx)
+ return obj
+ elif ret == 0:
+ if self.file_like is not None:
+ self.read_from_file()
+ continue
+ if iter:
+ raise StopIteration("No more data to unpack.")
else:
- raise UnpackValueError("Unpack failed: error = %d" % (ret,))
- except ValueError as e:
- raise UnpackValueError(e)
+ raise OutOfData("No more data to unpack.")
+ else:
+ raise ValueError("Unpack failed: error = %d" % (ret,))
def read_bytes(self, Py_ssize_t nbytes):
"""Read a specified number of raw bytes from the stream"""
diff --git a/msgpack/exceptions.py b/msgpack/exceptions.py
index 9766881..5bee5b2 100644
--- a/msgpack/exceptions.py
+++ b/msgpack/exceptions.py
@@ -1,6 +1,10 @@
class UnpackException(Exception):
- """Deprecated. Use Exception instead to catch all exception during unpacking."""
+ """Base class for some exceptions raised while unpacking.
+ NOTE: unpack may raise exception other than subclass of
+ UnpackException. If you want to catch all error, catch
+ Exception instead.
+ """
class BufferFull(UnpackException):
pass
@@ -10,11 +14,16 @@ class OutOfData(UnpackException):
pass
-class UnpackValueError(UnpackException, ValueError):
- """Deprecated. Use ValueError instead."""
+# Deprecated. Use ValueError instead
+UnpackValueError = ValueError
class ExtraData(UnpackValueError):
+ """ExtraData is raised when there is trailing data.
+
+ This exception is raised while only one-shot (not streaming)
+ unpack.
+ """
def __init__(self, unpacked, extra):
self.unpacked = unpacked
self.extra = extra
@@ -23,19 +32,7 @@ class ExtraData(UnpackValueError):
return "unpack(b) received extra data."
-class PackException(Exception):
- """Deprecated. Use Exception instead to catch all exception during packing."""
-
-
-class PackValueError(PackException, ValueError):
- """PackValueError is raised when type of input data is supported but it's value is unsupported.
-
- Deprecated. Use ValueError instead.
- """
-
-
-class PackOverflowError(PackValueError, OverflowError):
- """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32).
-
- Deprecated. Use ValueError instead.
- """
+#Deprecated. Use Exception instead to catch all exception during packing.
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError
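
Since the old exception names are now plain aliases of the builtins, existing
except clauses keep matching. A quick sanity check of the aliasing introduced
above (uses only the names defined in this file):

    from msgpack import exceptions

    assert exceptions.PackException is Exception
    assert exceptions.PackValueError is ValueError
    assert exceptions.PackOverflowError is OverflowError
    assert exceptions.UnpackValueError is ValueError
    assert issubclass(exceptions.ExtraData, ValueError)
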
diff --git a/msgpack/fallback.py b/msgpack/fallback.py
index 197b6d2..9d46171 100644
--- a/msgpack/fallback.py
+++ b/msgpack/fallback.py
@@ -52,9 +52,6 @@ else:
from msgpack.exceptions import (
BufferFull,
OutOfData,
- UnpackValueError,
- PackValueError,
- PackOverflowError,
ExtraData)
from msgpack import ExtType
@@ -120,7 +117,7 @@ def unpackb(packed, **kwargs):
try:
ret = unpacker._unpack()
except OutOfData:
- raise UnpackValueError("Data is not enough.")
+ raise ValueError("Data is not enough.")
if unpacker._got_extradata():
raise ExtraData(ret, unpacker._get_extradata())
return ret
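
ExtraData is still raised for trailing bytes in the one-shot unpackb(), and because
it now derives directly from ValueError it can be caught under either name. A small
sketch (hypothetical payload):

    import msgpack

    try:
        msgpack.unpackb(msgpack.packb(1) + b"junk")
    except ValueError as exc:      # ExtraData is a ValueError subclass after this change
        print(type(exc).__name__, "-", exc)
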
@@ -370,18 +367,18 @@ class Unpacker(object):
n = b & 0b00011111
typ = TYPE_RAW
if n > self._max_str_len:
- raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n)
elif b & 0b11110000 == 0b10010000:
n = b & 0b00001111
typ = TYPE_ARRAY
if n > self._max_array_len:
- raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b & 0b11110000 == 0b10000000:
n = b & 0b00001111
typ = TYPE_MAP
if n > self._max_map_len:
- raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
elif b == 0xc0:
obj = None
elif b == 0xc2:
@@ -394,7 +391,7 @@ class Unpacker(object):
n = self._buffer[self._buff_i]
self._buff_i += 1
if n > self._max_bin_len:
- raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n)
elif b == 0xc5:
typ = TYPE_BIN
@@ -402,7 +399,7 @@ class Unpacker(object):
n = _unpack_from(">H", self._buffer, self._buff_i)[0]
self._buff_i += 2
if n > self._max_bin_len:
- raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n)
elif b == 0xc6:
typ = TYPE_BIN
@@ -410,7 +407,7 @@ class Unpacker(object):
n = _unpack_from(">I", self._buffer, self._buff_i)[0]
self._buff_i += 4
if n > self._max_bin_len:
- raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n)
elif b == 0xc7: # ext 8
typ = TYPE_EXT
@@ -418,7 +415,7 @@ class Unpacker(object):
L, n = _unpack_from('Bb', self._buffer, self._buff_i)
self._buff_i += 2
if L > self._max_ext_len:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L)
elif b == 0xc8: # ext 16
typ = TYPE_EXT
@@ -426,7 +423,7 @@ class Unpacker(object):
L, n = _unpack_from('>Hb', self._buffer, self._buff_i)
self._buff_i += 3
if L > self._max_ext_len:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L)
elif b == 0xc9: # ext 32
typ = TYPE_EXT
@@ -434,7 +431,7 @@ class Unpacker(object):
L, n = _unpack_from('>Ib', self._buffer, self._buff_i)
self._buff_i += 5
if L > self._max_ext_len:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L)
elif b == 0xca:
self._reserve(4)
@@ -479,35 +476,35 @@ class Unpacker(object):
elif b == 0xd4: # fixext 1
typ = TYPE_EXT
if self._max_ext_len < 1:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
self._reserve(2)
n, obj = _unpack_from("b1s", self._buffer, self._buff_i)
self._buff_i += 2
elif b == 0xd5: # fixext 2
typ = TYPE_EXT
if self._max_ext_len < 2:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
self._reserve(3)
n, obj = _unpack_from("b2s", self._buffer, self._buff_i)
self._buff_i += 3
elif b == 0xd6: # fixext 4
typ = TYPE_EXT
if self._max_ext_len < 4:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
self._reserve(5)
n, obj = _unpack_from("b4s", self._buffer, self._buff_i)
self._buff_i += 5
elif b == 0xd7: # fixext 8
typ = TYPE_EXT
if self._max_ext_len < 8:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
self._reserve(9)
n, obj = _unpack_from("b8s", self._buffer, self._buff_i)
self._buff_i += 9
elif b == 0xd8: # fixext 16
typ = TYPE_EXT
if self._max_ext_len < 16:
- raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
+ raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
self._reserve(17)
n, obj = _unpack_from("b16s", self._buffer, self._buff_i)
self._buff_i += 17
@@ -517,7 +514,7 @@ class Unpacker(object):
n = self._buffer[self._buff_i]
self._buff_i += 1
if n > self._max_str_len:
- raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n)
elif b == 0xda:
typ = TYPE_RAW
@@ -525,7 +522,7 @@ class Unpacker(object):
n, = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2
if n > self._max_str_len:
- raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n)
elif b == 0xdb:
typ = TYPE_RAW
@@ -533,7 +530,7 @@ class Unpacker(object):
n, = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4
if n > self._max_str_len:
- raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n)
elif b == 0xdc:
typ = TYPE_ARRAY
@@ -541,30 +538,30 @@ class Unpacker(object):
n, = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2
if n > self._max_array_len:
- raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b == 0xdd:
typ = TYPE_ARRAY
self._reserve(4)
n, = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4
if n > self._max_array_len:
- raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b == 0xde:
self._reserve(2)
n, = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2
if n > self._max_map_len:
- raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
elif b == 0xdf:
self._reserve(4)
n, = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4
if n > self._max_map_len:
- raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
else:
- raise UnpackValueError("Unknown header: 0x%x" % b)
+ raise ValueError("Unknown header: 0x%x" % b)
return typ, n, obj
def _unpack(self, execute=EX_CONSTRUCT):
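
All of the max_*_len limits checked above likewise report violations as ValueError
now. A minimal sketch (assuming unpackb() forwards these keyword arguments to
Unpacker, as the fallback unpackb() shown earlier does):

    import msgpack

    packed = msgpack.packb("hello world")
    try:
        msgpack.unpackb(packed, max_str_len=5)
    except ValueError as exc:      # previously UnpackValueError
        print("limit exceeded:", exc)
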
@@ -572,11 +569,11 @@ class Unpacker(object):
if execute == EX_READ_ARRAY_HEADER:
if typ != TYPE_ARRAY:
- raise UnpackValueError("Expected array")
+ raise ValueError("Expected array")
return n
if execute == EX_READ_MAP_HEADER:
if typ != TYPE_MAP:
- raise UnpackValueError("Expected map")
+ raise ValueError("Expected map")
return n
# TODO should we eliminate the recursion?
if typ == TYPE_ARRAY:
@@ -754,7 +751,7 @@ class Packer(object):
list_types = (list, tuple)
while True:
if nest_limit < 0:
- raise PackValueError("recursion limit exceeded")
+ raise ValueError("recursion limit exceeded")
if obj is None:
return self._buffer.write(b"\xc0")
if check(obj, bool):
@@ -786,11 +783,11 @@ class Packer(object):
obj = self._default(obj)
default_used = True
continue
- raise PackOverflowError("Integer value out of range")
+ raise OverflowError("Integer value out of range")
if check(obj, (bytes, bytearray)):
n = len(obj)
if n >= 2**32:
- raise PackValueError("%s is too large" % type(obj).__name__)
+ raise ValueError("%s is too large" % type(obj).__name__)
self._pack_bin_header(n)
return self._buffer.write(obj)
if check(obj, Unicode):
@@ -801,13 +798,13 @@ class Packer(object):
obj = obj.encode(self._encoding, self._unicode_errors)
n = len(obj)
if n >= 2**32:
- raise PackValueError("String is too large")
+ raise ValueError("String is too large")
self._pack_raw_header(n)
return self._buffer.write(obj)
if check(obj, memoryview):
n = len(obj) * obj.itemsize
if n >= 2**32:
- raise PackValueError("Memoryview is too large")
+ raise ValueError("Memoryview is too large")
self._pack_bin_header(n)
return self._buffer.write(obj)
if check(obj, float):
@@ -874,7 +871,7 @@ class Packer(object):
def pack_array_header(self, n):
if n >= 2**32:
- raise PackValueError
+ raise ValueError
self._pack_array_header(n)
if self._autoreset:
ret = self._buffer.getvalue()
@@ -883,7 +880,7 @@ class Packer(object):
def pack_map_header(self, n):
if n >= 2**32:
- raise PackValueError
+ raise ValueError
self._pack_map_header(n)
if self._autoreset:
ret = self._buffer.getvalue()
@@ -899,7 +896,7 @@ class Packer(object):
raise TypeError("data must have bytes type")
L = len(data)
if L > 0xffffffff:
- raise PackValueError("Too large data")
+ raise ValueError("Too large data")
if L == 1:
self._buffer.write(b'\xd4')
elif L == 2:
@@ -926,7 +923,7 @@ class Packer(object):
return self._buffer.write(struct.pack(">BH", 0xdc, n))
if n <= 0xffffffff:
return self._buffer.write(struct.pack(">BI", 0xdd, n))
- raise PackValueError("Array is too large")
+ raise ValueError("Array is too large")
def _pack_map_header(self, n):
if n <= 0x0f:
@@ -935,7 +932,7 @@ class Packer(object):
return self._buffer.write(struct.pack(">BH", 0xde, n))
if n <= 0xffffffff:
return self._buffer.write(struct.pack(">BI", 0xdf, n))
- raise PackValueError("Dict is too large")
+ raise ValueError("Dict is too large")
def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
self._pack_map_header(n)
@@ -953,7 +950,7 @@ class Packer(object):
elif n <= 0xffffffff:
self._buffer.write(struct.pack(">BI", 0xdb, n))
else:
- raise PackValueError('Raw is too large')
+ raise ValueError('Raw is too large')
def _pack_bin_header(self, n):
if not self._use_bin_type:
@@ -965,7 +962,7 @@ class Packer(object):
elif n <= 0xffffffff:
return self._buffer.write(struct.pack(">BI", 0xc6, n))
else:
- raise PackValueError('Bin is too large')
+ raise ValueError('Bin is too large')
def bytes(self):
"""Return internal buffer contents as bytes object"""