Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions msgpack/_packer.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ from libc.stdlib cimport *
from libc.string cimport *
from libc.limits cimport *

from msgpack.exceptions import PackValueError
from msgpack.exceptions import PackValueError, PackOverflowError
from msgpack import ExtType


Expand Down Expand Up @@ -166,7 +166,7 @@ cdef class Packer(object):
default_used = True
continue
else:
raise
raise PackOverflowError("Integer value out of range")
elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o):
longval = o
ret = msgpack_pack_long(&self.pk, longval)
Expand All @@ -180,7 +180,7 @@ cdef class Packer(object):
elif PyBytes_CheckExact(o) if strict_types else PyBytes_Check(o):
L = len(o)
if L > (2**32)-1:
raise ValueError("bytes is too large")
raise PackValueError("bytes is too large")
rawval = o
ret = msgpack_pack_bin(&self.pk, L)
if ret == 0:
Expand All @@ -191,7 +191,7 @@ cdef class Packer(object):
o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors)
L = len(o)
if L > (2**32)-1:
raise ValueError("unicode string is too large")
raise PackValueError("unicode string is too large")
rawval = o
ret = msgpack_pack_raw(&self.pk, L)
if ret == 0:
Expand All @@ -200,7 +200,7 @@ cdef class Packer(object):
d = <dict>o
L = len(d)
if L > (2**32)-1:
raise ValueError("dict is too large")
raise PackValueError("dict is too large")
ret = msgpack_pack_map(&self.pk, L)
if ret == 0:
for k, v in d.iteritems():
Expand All @@ -211,7 +211,7 @@ cdef class Packer(object):
elif not strict_types and PyDict_Check(o):
L = len(o)
if L > (2**32)-1:
raise ValueError("dict is too large")
raise PackValueError("dict is too large")
ret = msgpack_pack_map(&self.pk, L)
if ret == 0:
for k, v in o.items():
Expand All @@ -225,25 +225,25 @@ cdef class Packer(object):
rawval = o.data
L = len(o.data)
if L > (2**32)-1:
raise ValueError("EXT data is too large")
raise PackValueError("EXT data is too large")
ret = msgpack_pack_ext(&self.pk, longval, L)
ret = msgpack_pack_raw_body(&self.pk, rawval, L)
elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)):
L = len(o)
if L > (2**32)-1:
raise ValueError("list is too large")
raise PackValueError("list is too large")
ret = msgpack_pack_array(&self.pk, L)
if ret == 0:
for v in o:
ret = self._pack(v, nest_limit-1)
if ret != 0: break
elif PyMemoryView_Check(o):
if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0:
raise ValueError("could not get buffer for memoryview")
raise PackValueError("could not get buffer for memoryview")
L = view.len
if L > (2**32)-1:
PyBuffer_Release(&view);
raise ValueError("memoryview is too large")
raise PackValueError("memoryview is too large")
ret = msgpack_pack_bin(&self.pk, L)
if ret == 0:
ret = msgpack_pack_raw_body(&self.pk, <char*>view.buf, L)
Expand Down Expand Up @@ -274,7 +274,7 @@ cdef class Packer(object):

def pack_array_header(self, size_t size):
if size > (2**32-1):
raise ValueError
raise PackValueError
cdef int ret = msgpack_pack_array(&self.pk, size)
if ret == -1:
raise MemoryError
Expand All @@ -287,7 +287,7 @@ cdef class Packer(object):

def pack_map_header(self, size_t size):
if size > (2**32-1):
raise ValueError
raise PackValueError
cdef int ret = msgpack_pack_map(&self.pk, size)
if ret == -1:
raise MemoryError
Expand Down
47 changes: 25 additions & 22 deletions msgpack/_unpacker.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ from libc.string cimport *
from libc.limits cimport *

from msgpack.exceptions import (
BufferFull,
OutOfData,
UnpackValueError,
ExtraData,
)
BufferFull,
OutOfData,
UnpackValueError,
ExtraData,
)
from msgpack import ExtType


Expand Down Expand Up @@ -397,24 +397,27 @@ cdef class Unpacker(object):
else:
raise OutOfData("No more data to unpack.")

ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
if write_bytes is not None:
write_bytes(PyBytes_FromStringAndSize(self.buf + prev_head, self.buf_head - prev_head))

if ret == 1:
obj = unpack_data(&self.ctx)
unpack_init(&self.ctx)
return obj
elif ret == 0:
if self.file_like is not None:
self.read_from_file()
continue
if iter:
raise StopIteration("No more data to unpack.")
try:
ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
if write_bytes is not None:
write_bytes(PyBytes_FromStringAndSize(self.buf + prev_head, self.buf_head - prev_head))

if ret == 1:
obj = unpack_data(&self.ctx)
unpack_init(&self.ctx)
return obj
elif ret == 0:
if self.file_like is not None:
self.read_from_file()
continue
if iter:
raise StopIteration("No more data to unpack.")
else:
raise OutOfData("No more data to unpack.")
else:
raise OutOfData("No more data to unpack.")
else:
raise ValueError("Unpack failed: error = %d" % (ret,))
raise UnpackValueError("Unpack failed: error = %d" % (ret,))
except ValueError as e:
raise UnpackValueError(e)

def read_bytes(self, Py_ssize_t nbytes):
"""Read a specified number of raw bytes from the stream"""
Expand Down
22 changes: 17 additions & 5 deletions msgpack/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
class UnpackException(Exception):
pass
    """Deprecated. Use Exception instead to catch all exceptions during unpacking."""


class BufferFull(UnpackException):
Expand All @@ -11,19 +11,31 @@ class OutOfData(UnpackException):


class UnpackValueError(UnpackException, ValueError):
pass
"""Deprecated. Use ValueError instead."""


class ExtraData(ValueError):
class ExtraData(UnpackValueError):
def __init__(self, unpacked, extra):
self.unpacked = unpacked
self.extra = extra

def __str__(self):
return "unpack(b) received extra data."


class PackException(Exception):
pass
    """Deprecated. Use Exception instead to catch all exceptions during packing."""


class PackValueError(PackException, ValueError):
pass
    """PackValueError is raised when the type of input data is supported but its value is unsupported.

Deprecated. Use ValueError instead.
"""


class PackOverflowError(PackValueError, OverflowError):
    """PackOverflowError is raised when an integer value is outside the range msgpack supports [-2**31, 2**32).

Deprecated. Use ValueError instead.
"""
51 changes: 26 additions & 25 deletions msgpack/fallback.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ def getvalue(self):
OutOfData,
UnpackValueError,
PackValueError,
PackOverflowError,
ExtraData)

from msgpack import ExtType
Expand Down Expand Up @@ -363,17 +364,17 @@ def _read_header(self, execute=EX_CONSTRUCT, write_bytes=None):
obj = self._fb_read(n, write_bytes)
typ = TYPE_RAW
if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
elif b & 0b11110000 == 0b10010000:
n = b & 0b00001111
typ = TYPE_ARRAY
if n > self._max_array_len:
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b & 0b11110000 == 0b10000000:
n = b & 0b00001111
typ = TYPE_MAP
if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
elif b == 0xc0:
obj = None
elif b == 0xc2:
Expand All @@ -384,37 +385,37 @@ def _read_header(self, execute=EX_CONSTRUCT, write_bytes=None):
typ = TYPE_BIN
n = struct.unpack("B", self._fb_read(1, write_bytes))[0]
if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc5:
typ = TYPE_BIN
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc6:
typ = TYPE_BIN
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._fb_read(n, write_bytes)
elif b == 0xc7: # ext 8
typ = TYPE_EXT
L, n = struct.unpack('Bb', self._fb_read(2, write_bytes))
if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xc8: # ext 16
typ = TYPE_EXT
L, n = struct.unpack('>Hb', self._fb_read(3, write_bytes))
if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xc9: # ext 32
typ = TYPE_EXT
L, n = struct.unpack('>Ib', self._fb_read(5, write_bytes))
if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._fb_read(L, write_bytes)
elif b == 0xca:
obj = struct.unpack(">f", self._fb_read(4, write_bytes))[0]
Expand All @@ -439,65 +440,65 @@ def _read_header(self, execute=EX_CONSTRUCT, write_bytes=None):
elif b == 0xd4: # fixext 1
typ = TYPE_EXT
if self._max_ext_len < 1:
raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
n, obj = struct.unpack('b1s', self._fb_read(2, write_bytes))
elif b == 0xd5: # fixext 2
typ = TYPE_EXT
if self._max_ext_len < 2:
raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
n, obj = struct.unpack('b2s', self._fb_read(3, write_bytes))
elif b == 0xd6: # fixext 4
typ = TYPE_EXT
if self._max_ext_len < 4:
raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
n, obj = struct.unpack('b4s', self._fb_read(5, write_bytes))
elif b == 0xd7: # fixext 8
typ = TYPE_EXT
if self._max_ext_len < 8:
raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
n, obj = struct.unpack('b8s', self._fb_read(9, write_bytes))
elif b == 0xd8: # fixext 16
typ = TYPE_EXT
if self._max_ext_len < 16:
raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
n, obj = struct.unpack('b16s', self._fb_read(17, write_bytes))
elif b == 0xd9:
typ = TYPE_RAW
n = struct.unpack("B", self._fb_read(1, write_bytes))[0]
if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xda:
typ = TYPE_RAW
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xdb:
typ = TYPE_RAW
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._fb_read(n, write_bytes)
elif b == 0xdc:
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
if n > self._max_array_len:
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
typ = TYPE_ARRAY
elif b == 0xdd:
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
if n > self._max_array_len:
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
typ = TYPE_ARRAY
elif b == 0xde:
n = struct.unpack(">H", self._fb_read(2, write_bytes))[0]
if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
elif b == 0xdf:
n = struct.unpack(">I", self._fb_read(4, write_bytes))[0]
if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP
else:
raise UnpackValueError("Unknown header: 0x%x" % b)
Expand Down Expand Up @@ -683,7 +684,7 @@ def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT,
obj = self._default(obj)
default_used = True
continue
raise PackValueError("Integer value out of range")
raise PackOverflowError("Integer value out of range")
if self._use_bin_type and check(obj, (bytes, memoryview)):
n = len(obj)
if n <= 0xff:
Expand Down Expand Up @@ -778,7 +779,7 @@ def pack_map_pairs(self, pairs):

def pack_array_header(self, n):
if n >= 2**32:
raise ValueError
raise PackValueError
self._fb_pack_array_header(n)
ret = self._buffer.getvalue()
if self._autoreset:
Expand All @@ -789,7 +790,7 @@ def pack_array_header(self, n):

def pack_map_header(self, n):
if n >= 2**32:
raise ValueError
raise PackValueError
self._fb_pack_map_header(n)
ret = self._buffer.getvalue()
if self._autoreset:
Expand All @@ -807,7 +808,7 @@ def pack_ext_type(self, typecode, data):
raise TypeError("data must have bytes type")
L = len(data)
if L > 0xffffffff:
raise ValueError("Too large data")
raise PackValueError("Too large data")
if L == 1:
self._buffer.write(b'\xd4')
elif L == 2:
Expand Down
Loading