Fix null handling for Arrow data (#6227)
borchero committed Dec 6, 2023
1 parent f5b6bd6 commit d84582b
Showing 3 changed files with 16 additions and 3 deletions.
include/LightGBM/arrow.tpp (2 changes: 1 addition & 1 deletion)

@@ -144,7 +144,7 @@ struct ArrayIndexAccessor {
     // - The structure of validity bitmasks is taken from here:
     //   https://arrow.apache.org/docs/format/Columnar.html#validity-bitmaps
     // - If the bitmask is NULL, all indices are valid
-    if (validity == nullptr || !(validity[buffer_idx / 8] & (1 << (buffer_idx % 8)))) {
+    if (validity == nullptr || (validity[buffer_idx / 8] & (1 << (buffer_idx % 8)))) {
       // In case the index is valid, we take it from the data buffer
       auto data = static_cast<const T*>(array->buffers[1]);
       return static_cast<double>(data[buffer_idx]);
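For context: Arrow validity bitmaps use LSB bit ordering, with bit i set meaning value i is present and bit i cleared meaning it is null; the removed `!` inverted that test, so non-null values were read as missing. The pyarrow sketch below is not part of the commit (the array contents are illustrative); it inspects a validity buffer and applies the same bit test as the corrected C++ condition.

```python
# Illustrative only: read an Arrow validity bitmap and apply the same
# bit test the fixed C++ condition uses (bit set => value is valid).
import pyarrow as pa

arr = pa.array([1.0, None, 3.0, None, 5.0], type=pa.float32())
validity = arr.buffers()[0].to_pybytes()  # buffer 0 holds the validity bitmap

for i in range(len(arr)):
    is_valid = bool(validity[i // 8] & (1 << (i % 8)))
    print(i, "valid" if is_valid else "null")  # indices 1 and 3 print "null"
```

When a column contains no nulls at all, the validity buffer may be omitted entirely, which is why the `nullptr` check in the C++ condition short-circuits to "valid".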
tests/cpp_tests/test_arrow.cpp (6 changes: 4 additions & 2 deletions)

@@ -41,10 +41,12 @@ class ArrowChunkedArrayTest : public testing::Test {
     // 1) Create validity bitmap
     char* validity = nullptr;
     if (!null_indices.empty()) {
-      validity = static_cast<char*>(calloc(values.size() + sizeof(char) - 1, sizeof(char)));
+      auto num_bytes = (values.size() + 7) / 8;
+      validity = static_cast<char*>(calloc(num_bytes, sizeof(char)));
+      memset(validity, 0xff, num_bytes * sizeof(char));
       for (size_t i = 0; i < values.size(); ++i) {
         if (std::find(null_indices.begin(), null_indices.end(), i) != null_indices.end()) {
-          validity[i / 8] |= (1 << (i % 8));
+          validity[i / 8] &= ~(1 << (i % 8));
         }
       }
     }
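The test helper now allocates ceil(n / 8) bytes, marks every entry valid, and then clears the bit for each null index. A hedged Python equivalent of that construction (the helper name and example values are illustrative, not taken from the test):

```python
# Sketch of the corrected bitmap construction: start all-valid, then clear
# the bit for every null index. ceil(n / 8) bytes cover n values.
def build_validity_bitmap(num_values, null_indices):
    num_bytes = (num_values + 7) // 8
    bitmap = bytearray(b"\xff" * num_bytes)       # all bits set: everything valid
    for i in null_indices:
        bitmap[i // 8] &= 0xFF & ~(1 << (i % 8))  # clear the bit: mark as null
    return bytes(bitmap)

# 5 values with index 1 null -> 0b11111101 in the first (and only) byte.
assert build_validity_bitmap(5, [1]) == bytes([0b11111101])
```

The previous version allocated zeroed memory (all entries null) and then set bits at the null indices, which is the exact opposite of the Arrow convention.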
tests/python_package_test/test_arrow.py (11 changes: 11 additions & 0 deletions)

@@ -46,6 +46,16 @@ def generate_simple_arrow_table() -> pa.Table:
     return pa.Table.from_arrays(columns, names=[f"col_{i}" for i in range(len(columns))])
 
 
+def generate_nullable_arrow_table() -> pa.Table:
+    columns = [
+        pa.chunked_array([[1, None, 3, 4, 5]], type=pa.float32()),
+        pa.chunked_array([[None, 2, 3, 4, 5]], type=pa.float32()),
+        pa.chunked_array([[1, 2, 3, 4, None]], type=pa.float32()),
+        pa.chunked_array([[None, None, None, None, None]], type=pa.float32()),
+    ]
+    return pa.Table.from_arrays(columns, names=[f"col_{i}" for i in range(len(columns))])
+
+
 def generate_dummy_arrow_table() -> pa.Table:
     col1 = pa.chunked_array([[1, 2, 3], [4, 5]], type=pa.uint8())
     col2 = pa.chunked_array([[0.5, 0.6], [0.1, 0.8, 1.5]], type=pa.float32())

@@ -95,6 +105,7 @@ def dummy_dataset_params() -> Dict[str, Any]:
     [  # Use lambda functions here to minimize memory consumption
         (lambda: generate_simple_arrow_table(), dummy_dataset_params()),
         (lambda: generate_dummy_arrow_table(), dummy_dataset_params()),
+        (lambda: generate_nullable_arrow_table(), dummy_dataset_params()),
         (lambda: generate_random_arrow_table(3, 1000, 42), {}),
         (lambda: generate_random_arrow_table(100, 10000, 43), {}),
     ],
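As a usage sketch (not part of the commit): a nullable table like the one produced by generate_nullable_arrow_table can be passed directly to lgb.Dataset. The dataset parameters below are assumptions chosen so a five-row table bins without errors; they are not values taken from the test file.

```python
# Assumed usage: requires a LightGBM build with Arrow support plus pyarrow.
# Nulls in the table should surface as missing values after the fix.
import lightgbm as lgb
import pyarrow as pa

table = pa.Table.from_arrays(
    [pa.chunked_array([[1, None, 3, 4, 5]], type=pa.float32())],
    names=["col_0"],
)
params = {"min_data_in_bin": 1, "min_data_in_leaf": 1}  # assumed, for tiny data
dataset = lgb.Dataset(table, params=params)
dataset.construct()  # with the fix, index 1 is read as missing, not as a value
```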
