Skip to content

Commit c105a21

Browse files
committed
Remove deprecated APIs from <= 0.4.0
Change-Id: Iaf3389afae1ddd64282d51516ce0cb0e3e5bd078
1 parent 4e0aa3c commit c105a21

File tree

6 files changed

+7
-81
lines changed

6 files changed

+7
-81
lines changed

cpp/src/arrow/io/hdfs.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -172,8 +172,8 @@ class ARROW_EXPORT HadoopFileSystem : public FileSystem {
172172
DISALLOW_COPY_AND_ASSIGN(HadoopFileSystem);
173173
};
174174

175-
// 0.6.0
176175
#ifndef ARROW_NO_DEPRECATED_API
176+
/// \deprecated Since 0.6.0
177177
using HdfsClient = HadoopFileSystem;
178178
#endif
179179

cpp/src/arrow/ipc/reader.h

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -194,13 +194,6 @@ Status ARROW_EXPORT ReadRecordBatch(const std::shared_ptr<Schema>& schema, int64
194194
Status ARROW_EXPORT ReadTensor(int64_t offset, io::RandomAccessFile* file,
195195
std::shared_ptr<Tensor>* out);
196196

197-
/// Backwards-compatibility for Arrow < 0.4.0
198-
///
199-
#ifndef ARROW_NO_DEPRECATED_API
200-
using StreamReader = RecordBatchReader;
201-
using FileReader = RecordBatchFileReader;
202-
#endif
203-
204197
} // namespace ipc
205198
} // namespace arrow
206199

cpp/src/arrow/ipc/writer.h

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -177,13 +177,6 @@ Status ARROW_EXPORT WriteLargeRecordBatch(const RecordBatch& batch,
177177
Status ARROW_EXPORT WriteTensor(const Tensor& tensor, io::OutputStream* dst,
178178
int32_t* metadata_length, int64_t* body_length);
179179

180-
/// Backwards-compatibility for Arrow < 0.4.0
181-
///
182-
#ifndef ARROW_NO_DEPRECATED_API
183-
using FileWriter = RecordBatchFileWriter;
184-
using StreamWriter = RecordBatchStreamWriter;
185-
#endif
186-
187180
} // namespace ipc
188181
} // namespace arrow
189182

python/pyarrow/__init__.py

Lines changed: 3 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,8 @@
114114
def _plasma_store_entry_point():
115115
"""Entry point for starting the plasma store.
116116
117-
This can be used by invoking e. g. ``plasma_store -s /tmp/plasma -m 1000000000``
117+
This can be used by invoking e.g.
118+
``plasma_store -s /tmp/plasma -m 1000000000``
118119
from the command line and will start the plasma_store executable with the
119120
given arguments.
120121
"""
@@ -127,30 +128,10 @@ def _plasma_store_entry_point():
127128
process.wait()
128129

129130
# ----------------------------------------------------------------------
130-
# 0.4.0 deprecations
131+
# Deprecations
131132

132133
from pyarrow.util import _deprecate_class
133134

134-
FileReader = _deprecate_class('FileReader',
135-
'RecordBatchFileReader',
136-
RecordBatchFileReader, '0.5.0')
137-
138-
FileWriter = _deprecate_class('FileWriter',
139-
'RecordBatchFileWriter',
140-
RecordBatchFileWriter, '0.5.0')
141-
142-
StreamReader = _deprecate_class('StreamReader',
143-
'RecordBatchStreamReader',
144-
RecordBatchStreamReader, '0.5.0')
145-
146-
StreamWriter = _deprecate_class('StreamWriter',
147-
'RecordBatchStreamWriter',
148-
RecordBatchStreamWriter, '0.5.0')
149-
150-
InMemoryOutputStream = _deprecate_class('InMemoryOutputStream',
151-
'BufferOutputStream',
152-
BufferOutputStream, '0.5.0')
153-
154135
# Backwards compatibility with pyarrow < 0.6.0
155136
HdfsClient = _deprecate_class('HdfsClient', 'pyarrow.hdfs.connect',
156137
hdfs.connect, '0.6.0')

python/pyarrow/ipc.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def serialize_pandas(df):
155155
An object compatible with the buffer protocol
156156
"""
157157
batch = pa.RecordBatch.from_pandas(df)
158-
sink = pa.InMemoryOutputStream()
158+
sink = pa.BufferOutputStream()
159159
writer = pa.RecordBatchStreamWriter(sink, batch.schema)
160160
writer.write_batch(batch)
161161
writer.close()

python/pyarrow/tests/test_deprecations.py

Lines changed: 2 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -17,48 +17,7 @@
1717

1818
# Check that various deprecation warnings are raised
1919

20+
# flake8: noqa
21+
2022
import pyarrow as pa
2123
import pytest
22-
23-
24-
def test_inmemory_output_stream():
25-
with pytest.warns(FutureWarning):
26-
stream = pa.InMemoryOutputStream()
27-
assert isinstance(stream, pa.BufferOutputStream)
28-
29-
30-
def test_file_reader_writer():
31-
data = [
32-
pa.array([1, 2, 3, 4]),
33-
pa.array(['foo', 'bar', 'baz', None]),
34-
pa.array([True, None, False, True])
35-
]
36-
batch = pa.RecordBatch.from_arrays(data, ['f0', 'f1', 'f2'])
37-
38-
sink = pa.BufferOutputStream()
39-
40-
with pytest.warns(FutureWarning):
41-
stream_writer = pa.StreamWriter(sink, batch.schema)
42-
assert isinstance(stream_writer, pa.RecordBatchStreamWriter)
43-
44-
sink2 = pa.BufferOutputStream()
45-
with pytest.warns(FutureWarning):
46-
file_writer = pa.FileWriter(sink2, batch.schema)
47-
assert isinstance(file_writer, pa.RecordBatchFileWriter)
48-
49-
file_writer.write_batch(batch)
50-
stream_writer.write_batch(batch)
51-
52-
file_writer.close()
53-
stream_writer.close()
54-
55-
buf = sink.get_result()
56-
buf2 = sink2.get_result()
57-
58-
with pytest.warns(FutureWarning):
59-
stream_reader = pa.StreamReader(buf)
60-
assert isinstance(stream_reader, pa.RecordBatchStreamReader)
61-
62-
with pytest.warns(FutureWarning):
63-
file_reader = pa.FileReader(buf2)
64-
assert isinstance(file_reader, pa.RecordBatchFileReader)

0 commit comments

Comments (0)