Skip to content

Commit

Permalink
update arrow
Browse files Browse the repository at this point in the history
  • Loading branch information
ritchie46 committed Nov 27, 2021
1 parent bfc83ce commit ccaaadb
Show file tree
Hide file tree
Showing 9 changed files with 116 additions and 107 deletions.
6 changes: 3 additions & 3 deletions polars/polars-arrow/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,12 @@ description = "Arrow interfaces for Polars DataFrame library"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false }
# arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false }
# arrow = { package = "arrow2", git = "https://github.com/ritchie46/arrow2", default-features = false, features = ["compute"], branch = "fn_to" }
# arrow = { package = "arrow2", version = "0.7", default-features = false }
arrow = { package = "arrow2", version = "0.8", default-features = false }
num = "^0.4"
thiserror = "^1.0"

[features]
strings = []
compute = ["arrow/compute"]
compute = ["arrow/compute_cast"]
24 changes: 21 additions & 3 deletions polars/polars-core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ lazy = ["sort_multiple"]
performant = []

# extra utilities for Utf8Chunked
strings = ["regex", "polars-arrow/strings"]
strings = ["regex", "polars-arrow/strings", "arrow/compute_substring"]

# support for ObjectChunked<T> (downcastable Series of any type)
object = ["serde_json"]
Expand Down Expand Up @@ -131,9 +131,10 @@ docs-selection = [
[dependencies]
ahash = "0.7"
anyhow = "1.0"
arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false, features = ["compute"] }
# arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false, features = ["compute"] }
# arrow = { package = "arrow2", git = "https://github.com/ritchie46/arrow2", default-features = false, features = ["compute"], branch = "fn_to" }
# arrow = { package = "arrow2", version = "0.7", default-features = false, features = ["compute"] }
# arrow = { package = "arrow2", version = "0.8", default-features = false, features = ["compute"] }

chrono = { version = "0.4", optional = true }
comfy-table = { version = "4.0", optional = true }
hashbrown = { version = "0.11", features = ["rayon"] }
Expand All @@ -155,6 +156,23 @@ serde_json = { version = "1.0", optional = true }
thiserror = "1.0"
unsafe_unwrap = "^0.1.0"

[dependencies.arrow]
package = "arrow2"
version = "0.8"
default-features = false
features = [
"compute_aggregate",
"compute_arithmetics",
"compute_boolean",
"compute_boolean_kleene",
"compute_cast",
"compute_comparison",
"compute_concatenate",
"compute_filter",
"compute_if_then_else",
"compute_take",
]

[package.metadata.docs.rs]
# not all because arrow 4.3 does not compile with simd
# all-features = true
Expand Down
2 changes: 1 addition & 1 deletion polars/polars-core/src/chunked_array/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ impl<T> ChunkedArray<T> {

    /// Shrink the capacity of this array to fit its length.
pub fn shrink_to_fit(&mut self) {
self.chunks = vec![arrow::compute::concat::concatenate(
self.chunks = vec![arrow::compute::concatenate::concatenate(
self.chunks.iter().map(|a| &**a).collect_vec().as_slice(),
)
.unwrap()
Expand Down
10 changes: 5 additions & 5 deletions polars/polars-core/src/chunked_array/ops/chunkops.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use crate::chunked_array::object::builder::ObjectChunkedBuilder;
use crate::prelude::*;
#[cfg(feature = "object")]
use arrow::array::Array;
use arrow::compute::concat;
use arrow::compute::concatenate;
use itertools::Itertools;
#[cfg(feature = "dtype-categorical")]
use std::ops::Deref;
Expand All @@ -23,7 +23,7 @@ where
if self.chunks().len() == 1 {
self.clone()
} else {
let chunks = vec![concat::concatenate(
let chunks = vec![concatenate::concatenate(
self.chunks.iter().map(|a| &**a).collect_vec().as_slice(),
)
.unwrap()
Expand All @@ -38,7 +38,7 @@ impl ChunkOps for BooleanChunked {
if self.chunks().len() == 1 {
self.clone()
} else {
let chunks = vec![concat::concatenate(
let chunks = vec![concatenate::concatenate(
self.chunks.iter().map(|a| &**a).collect_vec().as_slice(),
)
.unwrap()
Expand All @@ -53,7 +53,7 @@ impl ChunkOps for Utf8Chunked {
if self.chunks().len() == 1 {
self.clone()
} else {
let chunks = vec![concat::concatenate(
let chunks = vec![concatenate::concatenate(
self.chunks.iter().map(|a| &**a).collect_vec().as_slice(),
)
.unwrap()
Expand Down Expand Up @@ -81,7 +81,7 @@ impl ChunkOps for ListChunked {
if self.chunks.len() == 1 {
self.clone()
} else {
let chunks = vec![concat::concatenate(
let chunks = vec![concatenate::concatenate(
self.chunks.iter().map(|a| &**a).collect_vec().as_slice(),
)
.unwrap()
Expand Down
4 changes: 3 additions & 1 deletion polars/polars-core/src/frame/groupby/aggregations.rs
Original file line number Diff line number Diff line change
Expand Up @@ -586,7 +586,9 @@ impl AggList for ListChunked {
list_values.push(self.chunks[0].slice(0, 0).into())
}
let arrays = list_values.iter().map(|arr| &**arr).collect::<Vec<_>>();
let list_values: ArrayRef = arrow::compute::concat::concatenate(&arrays).unwrap().into();
let list_values: ArrayRef = arrow::compute::concatenate::concatenate(&arrays)
.unwrap()
.into();
let data_type = ListArray::<i64>::default_datatype(list_values.data_type().clone());
let arr = Arc::new(ListArray::<i64>::from_data(
data_type,
Expand Down
2 changes: 1 addition & 1 deletion polars/polars-core/src/series/from.rs
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ impl std::convert::TryFrom<(&str, Vec<ArrayRef>)> for Series {
use crate::chunked_array::categorical::CategoricalChunkedBuilder;
use arrow::datatypes::IntegerType;
let chunks = chunks.iter().map(|arr| &**arr).collect::<Vec<_>>();
let arr = arrow::compute::concat::concatenate(&chunks)?;
let arr = arrow::compute::concatenate::concatenate(&chunks)?;

let (keys, values) = match (key_type, &**value_type) {
(IntegerType::Int8, ArrowDataType::LargeUtf8) => {
Expand Down
4 changes: 2 additions & 2 deletions polars/polars-io/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@ private = []
[dependencies]
ahash = "0.7"
anyhow = "1.0"
arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false }
# arrow = { package = "arrow2", git = "https://github.com/jorgecarleitao/arrow2", rev = "e9a6c3ef7e1a328c298bd45e36ac2abf8ae44ebb", default-features = false }
# arrow = { package = "arrow2", git = "https://github.com/ritchie46/arrow2", default-features = false, features = ["compute"], branch = "fn_to" }
# arrow = { package = "arrow2", version = "0.7", default-features = false }
arrow = { package = "arrow2", version = "0.8", default-features = false }
csv-core = { version = "0.1.10", optional = true }
dirs = "4.0"
flate2 = { version = "1", optional = true, default-features = false }
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::physical_plan::state::ExecutionState;
use crate::physical_plan::PhysicalAggregation;
use crate::prelude::*;
use polars_arrow::arrow::{array::*, buffer::MutableBuffer, compute::concat::concatenate};
use polars_arrow::arrow::{array::*, buffer::MutableBuffer, compute::concatenate::concatenate};
use polars_core::frame::groupby::{fmt_groupby_column, GroupByMethod, GroupTuples};
use polars_core::utils::NoNull;
use polars_core::{prelude::*, POOL};
Expand Down

0 comments on commit ccaaadb

Please sign in to comment.