Remove deprecated APIs from <= 0.4.0
Change-Id: Iaf3389afae1ddd64282d51516ce0cb0e3e5bd078
wesm committed Aug 22, 2017 · 1 parent 4e0aa3c · commit c105a21
Showing 6 changed files with 7 additions and 81 deletions.
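Taken together, the six diffs finish the rename cycle begun around 0.4.0: the compatibility aliases are deleted outright, so callers must move to the current names. A minimal before/after sketch in Python, using only the mappings visible in the removed `_deprecate_class` calls below (the sample data is illustrative):

import pyarrow as pa

# Old alias (removed or deprecated here)  ->  current name
# pa.InMemoryOutputStream                 ->  pa.BufferOutputStream
# pa.StreamWriter                         ->  pa.RecordBatchStreamWriter
# pa.FileWriter                           ->  pa.RecordBatchFileWriter
# pa.StreamReader                         ->  pa.RecordBatchStreamReader
# pa.FileReader                           ->  pa.RecordBatchFileReader
# pa.HdfsClient                           ->  pyarrow.hdfs.connect (deprecated, not yet removed)

batch = pa.RecordBatch.from_arrays([pa.array([1, 2, 3])], ['f0'])

sink = pa.BufferOutputStream()               # was: pa.InMemoryOutputStream()
writer = pa.RecordBatchStreamWriter(sink, batch.schema)  # was: pa.StreamWriter(...)
writer.write_batch(batch)
writer.close()

buf = sink.get_result()
reader = pa.RecordBatchStreamReader(buf)     # was: pa.StreamReader(buf)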
2 changes: 1 addition & 1 deletion cpp/src/arrow/io/hdfs.h
@@ -172,8 +172,8 @@ class ARROW_EXPORT HadoopFileSystem : public FileSystem {
   DISALLOW_COPY_AND_ASSIGN(HadoopFileSystem);
 };
 
-// 0.6.0
 #ifndef ARROW_NO_DEPRECATED_API
+/// \deprecated Since 0.6.0
 using HdfsClient = HadoopFileSystem;
 #endif

7 changes: 0 additions & 7 deletions cpp/src/arrow/ipc/reader.h
@@ -194,13 +194,6 @@ Status ARROW_EXPORT ReadRecordBatch(const std::shared_ptr<Schema>& schema, int64
 Status ARROW_EXPORT ReadTensor(int64_t offset, io::RandomAccessFile* file,
                                std::shared_ptr<Tensor>* out);
 
-/// Backwards-compatibility for Arrow < 0.4.0
-///
-#ifndef ARROW_NO_DEPRECATED_API
-using StreamReader = RecordBatchReader;
-using FileReader = RecordBatchFileReader;
-#endif
-
 }  // namespace ipc
 }  // namespace arrow

7 changes: 0 additions & 7 deletions cpp/src/arrow/ipc/writer.h
@@ -177,13 +177,6 @@ Status ARROW_EXPORT WriteLargeRecordBatch(const RecordBatch& batch,
 Status ARROW_EXPORT WriteTensor(const Tensor& tensor, io::OutputStream* dst,
                                 int32_t* metadata_length, int64_t* body_length);
 
-/// Backwards-compatibility for Arrow < 0.4.0
-///
-#ifndef ARROW_NO_DEPRECATED_API
-using FileWriter = RecordBatchFileWriter;
-using StreamWriter = RecordBatchStreamWriter;
-#endif
-
 }  // namespace ipc
 }  // namespace arrow

25 changes: 3 additions & 22 deletions python/pyarrow/__init__.py
@@ -114,7 +114,8 @@
 def _plasma_store_entry_point():
     """Entry point for starting the plasma store.
-    This can be used by invoking e. g. ``plasma_store -s /tmp/plasma -m 1000000000``
+    This can be used by invoking e.g.
+    ``plasma_store -s /tmp/plasma -m 1000000000``
     from the command line and will start the plasma_store executable with the
     given arguments.
     """
@@ -127,30 +128,10 @@ def _plasma_store_entry_point():
     process.wait()
 
 # ----------------------------------------------------------------------
-# 0.4.0 deprecations
+# Deprecations
 
 from pyarrow.util import _deprecate_class
 
-FileReader = _deprecate_class('FileReader',
-                              'RecordBatchFileReader',
-                              RecordBatchFileReader, '0.5.0')
-
-FileWriter = _deprecate_class('FileWriter',
-                              'RecordBatchFileWriter',
-                              RecordBatchFileWriter, '0.5.0')
-
-StreamReader = _deprecate_class('StreamReader',
-                                'RecordBatchStreamReader',
-                                RecordBatchStreamReader, '0.5.0')
-
-StreamWriter = _deprecate_class('StreamWriter',
-                                'RecordBatchStreamWriter',
-                                RecordBatchStreamWriter, '0.5.0')
-
-InMemoryOutputStream = _deprecate_class('InMemoryOutputStream',
-                                        'BufferOutputStream',
-                                        BufferOutputStream, '0.5.0')
-
 # Backwards compatibility with pyarrow < 0.6.0
 HdfsClient = _deprecate_class('HdfsClient', 'pyarrow.hdfs.connect',
                               hdfs.connect, '0.6.0')
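The `_deprecate_class` helper itself is not part of this diff. Judging from the call sites above (old name, new name, replacement callable, target removal version) and from the `FutureWarning` and `isinstance` checks in the test file below, a plausible reconstruction is a warning factory along these lines — a sketch, not the actual `pyarrow.util` source:

import warnings

def _deprecate_class(old_name, new_name, klass, next_version):
    # Sketch: return a callable that emits FutureWarning, then delegates to
    # the replacement, so the result is an instance of the new class.
    def deprecated_factory(*args, **kwargs):
        warnings.warn(
            'pyarrow.{0} is deprecated; use {1} instead (removal planned '
            'for {2})'.format(old_name, new_name, next_version),
            FutureWarning, stacklevel=2)
        return klass(*args, **kwargs)
    return deprecated_factory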
2 changes: 1 addition & 1 deletion python/pyarrow/ipc.py
@@ -155,7 +155,7 @@ def serialize_pandas(df):
     An object compatible with the buffer protocol
     """
     batch = pa.RecordBatch.from_pandas(df)
-    sink = pa.InMemoryOutputStream()
+    sink = pa.BufferOutputStream()
     writer = pa.RecordBatchStreamWriter(sink, batch.schema)
     writer.write_batch(batch)
     writer.close()
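Nothing changes for callers of `serialize_pandas`; only its internal sink type is updated. A quick round trip, assuming the companion `deserialize_pandas` is exported at the top level alongside it (and that pandas is installed):

import pandas as pd
import pyarrow as pa

df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
buf = pa.serialize_pandas(df)   # now writes into a pa.BufferOutputStream
assert df.equals(pa.deserialize_pandas(buf))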
45 changes: 2 additions & 43 deletions python/pyarrow/tests/test_deprecations.py
Expand Up @@ -17,48 +17,7 @@

# Check that various deprecation warnings are raised

# flake8: noqa

import pyarrow as pa
import pytest


def test_inmemory_output_stream():
with pytest.warns(FutureWarning):
stream = pa.InMemoryOutputStream()
assert isinstance(stream, pa.BufferOutputStream)


def test_file_reader_writer():
data = [
pa.array([1, 2, 3, 4]),
pa.array(['foo', 'bar', 'baz', None]),
pa.array([True, None, False, True])
]
batch = pa.RecordBatch.from_arrays(data, ['f0', 'f1', 'f2'])

sink = pa.BufferOutputStream()

with pytest.warns(FutureWarning):
stream_writer = pa.StreamWriter(sink, batch.schema)
assert isinstance(stream_writer, pa.RecordBatchStreamWriter)

sink2 = pa.BufferOutputStream()
with pytest.warns(FutureWarning):
file_writer = pa.FileWriter(sink2, batch.schema)
assert isinstance(file_writer, pa.RecordBatchFileWriter)

file_writer.write_batch(batch)
stream_writer.write_batch(batch)

file_writer.close()
stream_writer.close()

buf = sink.get_result()
buf2 = sink2.get_result()

with pytest.warns(FutureWarning):
stream_reader = pa.StreamReader(buf)
assert isinstance(stream_reader, pa.RecordBatchStreamReader)

with pytest.warns(FutureWarning):
file_reader = pa.FileReader(buf2)
assert isinstance(file_reader, pa.RecordBatchFileReader)
