
Commit 2eb8bf4
Fix flake8 issues
Change-Id: I089463da34170b63f438c2afb2321b34ae077bbe
wesm committed Oct 23, 2017
1 parent cef7a7c commit 2eb8bf4
Showing 2 changed files with 6 additions and 13 deletions.
16 changes: 3 additions & 13 deletions python/pyarrow/serialization.py
@@ -20,10 +20,10 @@

import numpy as np

import pyarrow as pa
from pyarrow import serialize_pandas, deserialize_pandas
from pyarrow.lib import _default_serialization_context


def register_default_serialization_handlers(serialization_context):

# ----------------------------------------------------------------------
@@ -43,58 +43,48 @@ def register_default_serialization_handlers(serialization_context):
custom_serializer=lambda obj: str(obj),
custom_deserializer=lambda data: long(data)) # noqa: F821


def _serialize_ordered_dict(obj):
return list(obj.keys()), list(obj.values())


def _deserialize_ordered_dict(data):
return OrderedDict(zip(data[0], data[1]))


serialization_context.register_type(
OrderedDict, "OrderedDict",
custom_serializer=_serialize_ordered_dict,
custom_deserializer=_deserialize_ordered_dict)


def _serialize_default_dict(obj):
return list(obj.keys()), list(obj.values()), obj.default_factory


def _deserialize_default_dict(data):
return defaultdict(data[2], zip(data[0], data[1]))


serialization_context.register_type(
defaultdict, "defaultdict",
custom_serializer=_serialize_default_dict,
custom_deserializer=_deserialize_default_dict)


serialization_context.register_type(
type(lambda: 0), "function",
pickle=True)

# ----------------------------------------------------------------------
# Set up serialization for numpy with dtype object (primitive types are
    # handled efficiently with Arrow's Tensor facilities, see
    # python_to_arrow.cc)

def _serialize_numpy_array(obj):
return obj.tolist(), obj.dtype.str


def _deserialize_numpy_array(data):
return np.array(data[0], dtype=np.dtype(data[1]))


serialization_context.register_type(
np.ndarray, 'np.array',
custom_serializer=_serialize_numpy_array,
custom_deserializer=_deserialize_numpy_array)


# ----------------------------------------------------------------------
# Set up serialization for pandas Series and DataFrame

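For context, the handlers registered above are what let pyarrow's serialize/deserialize round-trip plain Python containers such as OrderedDict and defaultdict, plus numpy arrays with dtype=object. A minimal usage sketch, assuming the pyarrow serialization API of this era (pa.serialize / pa.deserialize, later removed from the library); the names obj, buf, and restored are illustrative:

    from collections import OrderedDict

    import numpy as np
    import pyarrow as pa

    # OrderedDict goes through the handler registered above: it is
    # serialized as (keys, values) lists and rebuilt on deserialization.
    # The object-dtype array uses the np.ndarray handler (tolist + dtype).
    obj = OrderedDict([("a", 1), ("b", np.array(["x", "y"], dtype=object))])

    buf = pa.serialize(obj).to_buffer()
    restored = pa.deserialize(buf)

    assert isinstance(restored, OrderedDict)
    assert list(restored.keys()) == ["a", "b"]
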
3 changes: 3 additions & 0 deletions python/pyarrow/tests/test_serialization.py
@@ -300,6 +300,7 @@ def test_datetime_serialization(large_memory_map):
for d in data:
serialization_roundtrip(d, mmap)


def test_torch_serialization(large_memory_map):
pytest.importorskip("torch")
import torch
@@ -311,6 +312,7 @@ def test_torch_serialization(large_memory_map):
obj = torch.from_numpy(np.random.randn(1000).astype(t))
serialization_roundtrip(obj, mmap)


def test_numpy_immutable(large_memory_map):
with pa.memory_map(large_memory_map, mode="r+") as mmap:
obj = np.zeros([10])
@@ -342,6 +344,7 @@ def deserialize_dummy_class(serialized_obj):

pa.serialize(DummyClass())


def test_buffer_serialization():

class BufferClass(object):
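The changes to the test file are additions of blank lines only. This is most likely flake8's pycodestyle check E302 ("expected 2 blank lines, got 1"), which requires two blank lines before a top-level function definition. A minimal illustration with hypothetical function names, not taken from the commit:

    def first_roundtrip_check():   # hypothetical example
        pass


    def second_roundtrip_check():  # the two blank lines above satisfy E302
        pass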
